author    Lukas Larsson <[email protected]>    2011-03-09 13:29:48 +0100
committer Lukas Larsson <[email protected]>    2011-03-09 13:29:48 +0100
commit    b6637f53cc885c336e3001617d742d79216c80e3 (patch)
tree      ec92e4ebe5c2774a671ba5eba8032ca179339951 /lib/dialyzer
parent    62e056af8c4fa058faa5087614c6b837a07f06e6 (diff)
parent    dd14097487c33ac4d1ceed36b96070feb545219f (diff)
Merge branch 'aronisstav/dialyzer/dialyzer_tests/OTP-9116' into dev
* aronisstav/dialyzer/dialyzer_tests/OTP-9116:
  Increase timetrap of options1 suite
  Write output_plt even when plt_check is ok
  Create plt with erts, kernel and stdlib only
  Update test results as they currently appear in dev
  Major restructure of dialyzer's testsuite
  Add 'apps' option to the erlang interface
  Update spec file to work with new common test structure
  Test suites for Dialyzer
Diffstat (limited to 'lib/dialyzer')
-rw-r--r--lib/dialyzer/src/dialyzer_cl.erl6
-rw-r--r--lib/dialyzer/src/dialyzer_cl_parse.erl7
-rw-r--r--lib/dialyzer/src/dialyzer_options.erl13
-rw-r--r--lib/dialyzer/test/Makefile75
-rw-r--r--lib/dialyzer/test/README44
-rw-r--r--lib/dialyzer/test/callgraph_tests_SUITE.erl52
-rw-r--r--lib/dialyzer/test/callgraph_tests_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/callgraph_tests_SUITE_data/results/test_missing_functions3
-rw-r--r--lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t1.erl16
-rw-r--r--lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t2.erl16
-rw-r--r--lib/dialyzer/test/dialyzer.spec5
-rw-r--r--lib/dialyzer/test/dialyzer_common.erl377
-rw-r--r--lib/dialyzer/test/dialyzer_test_constants.hrl1
-rw-r--r--lib/dialyzer/test/file_utils.erl155
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE.erl184
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/array3
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/crash7
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/dict15
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/ets3
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/gb_sets0
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/inf_loop15
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/int3
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/mixed_opaque2
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/my_digraph0
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/my_queue7
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/opaque2
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/queue11
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/rec6
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/timer4
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/union5
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/results/wings11
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/array/array_use.erl15
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/crash/crash_1.erl55
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/dict/dict_use.erl83
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/ets/ets_use.erl17
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/gb_sets/gb_sets_rec.erl23
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/inf_loop1.erl172
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_adt.erl33
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_use.erl11
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_queue_adt.erl26
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_rec_adt.erl25
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_use.erl31
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/my_digraph/my_digraph_adt.erl51
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_adt.erl23
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_use.erl35
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_adt.erl9
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug1.erl17
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug2.erl13
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug3.erl19
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug4.erl21
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/queue/queue_use.erl66
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_adt.erl22
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_use.erl30
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/timer/timer_use.erl20
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_adt.erl19
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_use.erl16
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings.hrl205
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_dissolve.erl375
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge.erl243
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge_cmd.erl91
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_face.erl127
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_facemat.erl299
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_intl.hrl15
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_io.erl37
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_sel.erl68
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_shape.erl69
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_util.erl39
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_we.erl250
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis1.erl14
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis2.erl14
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis3.erl14
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis4.erl14
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis5.erl14
-rw-r--r--lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis6.erl14
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE.erl54
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Entries3
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Repository1
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Root1
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_bits.hrl43
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_compile.hrl42
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/results/compiler35
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_asm.erl358
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_block.erl601
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_bool.erl617
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_clean.erl232
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_dict.erl196
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_disasm.erl964
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_flatten.erl137
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_jump.erl477
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_listing.erl117
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.erl240
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.hrl12
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_type.erl551
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_validator.erl1022
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl.erl4169
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_clauses.erl409
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_inline.erl2762
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_trees.erl801
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/compile.erl1109
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lib.erl509
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lint.erl515
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.erl4911
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.hrl111
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_pp.erl430
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_scan.erl495
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/erl_bifs.erl486
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/rec_env.erl611
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_expand_pmod.erl425
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_attributes.erl212
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_expand.erl1026
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_codegen.erl1755
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_core.erl1320
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.erl1568
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.hrl77
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel_pp.erl444
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.erl448
-rw-r--r--lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.hrl25
-rw-r--r--lib/dialyzer/test/options2_tests_SUITE.erl52
-rw-r--r--lib/dialyzer/test/options2_tests_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/options2_tests_SUITE_data/results/kernel0
-rw-r--r--lib/dialyzer/test/options2_tests_SUITE_data/src/kernel/global.erl1999
-rw-r--r--lib/dialyzer/test/plt_tests_SUITE.erl21
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE.erl64
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/results/asn1106
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/results/inets59
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/results/mnesia34
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Makefile151
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Restrictions.txt55
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.app.src20
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.appup.src166
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_db.erl162
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_records.hrl96
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct.erl1904
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_check.erl5567
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber.erl1468
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl1357
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_per.erl1235
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen.erl1664
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber.erl1525
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl1568
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per.erl1190
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl1811
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_name.erl225
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser.yrl1175
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser2.erl2764
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_pretty_format.erl199
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_tok.erl351
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_value.erl330
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt.erl69
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin.erl2310
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl1869
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_check.erl333
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_driver_handler.erl108
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per.erl1609
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin.erl2182
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl2102
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_v1.erl1843
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_history.sgml100
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_latest.sgml100
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/Makefile178
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/ftp.erl1582
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.erl260
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.hrl127
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http_lib.erl745
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_handler.erl724
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_manager.erl542
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.erl596
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.hrl77
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor.erl176
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor_sup.erl118
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_conf.erl688
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_example.erl134
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_manager.erl1030
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_misc_sup.erl116
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_parse.erl348
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_request_handler.erl995
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_response.erl437
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_socket.erl381
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_sup.erl203
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_util.erl777
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.erl94
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.hrl65
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.app.src56
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.appup.src135
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.config2
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets_sup.erl158
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/jnets_httpd.hrl138
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_actions.erl92
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_alias.erl175
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.erl750
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.hrl27
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_dets.erl222
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_mnesia.erl276
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_plain.erl344
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_server.erl424
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_browser.erl214
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_cgi.erl694
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_dir.erl266
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_disk_log.erl405
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_esi.erl490
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_get.erl179
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_head.erl89
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_htaccess.erl1150
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_include.erl726
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_log.erl250
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_range.erl397
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_responsecontrol.erl337
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security.erl307
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security_server.erl728
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_trace.erl69
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/uri.erl349
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/Makefile137
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.app.src52
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.appup.src6
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.erl2191
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.hrl118
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_backup.erl195
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_bup.erl1169
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint.erl1284
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl39
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_controller.erl2012
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_dumper.erl1092
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_event.erl263
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag.erl1201
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_hash.erl118
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl127
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_index.erl380
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_kernel_sup.erl62
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_late_loader.erl95
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_lib.erl1278
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_loader.erl805
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_locker.erl1022
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_log.erl1019
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_monitor.erl776
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_recover.erl1175
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_registry.erl277
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_schema.erl2899
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_hook.erl271
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_sup.erl39
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sp.erl39
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_subscr.erl492
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sup.erl137
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_text.erl191
-rw-r--r--lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_tm.erl2173
-rw-r--r--lib/dialyzer/test/race_tests_SUITE.erl799
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args30
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args72
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args82
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow33
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow43
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow55
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double14
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double24
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_new0
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_param5
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/extract_translations5
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_one_write_two2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_two_write_one2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_one2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_two2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow43
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_no_race0
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_race2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions13
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_nested2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_pathsens2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_twice3
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_nested2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_pathsens2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_twice3
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_nested2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_pathsens2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_pathsens2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_rec2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_pathsens2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_rec2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_nested2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_twice3
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_no_race0
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_race2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module72
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module82
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param_inter_module2
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function72
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function82
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_try_catch3
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars10
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars102
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars110
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars122
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars132
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars142
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars152
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars162
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars172
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars180
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars190
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars200
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars210
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars222
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars42
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars52
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars62
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars72
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars82
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars92
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args1.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args2.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args3.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args4.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args5.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args6.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args7.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args8.erl16
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow1.erl20
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow2.erl26
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow3.erl31
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow4.erl31
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow5.erl34
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race1.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race2.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race3.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race4.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race5.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race6.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double1.erl28
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double2.erl28
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs1.erl18
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs2.erl18
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_new.erl15
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_param.erl26
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/extract_translations.erl294
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race1.erl33
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race2.erl37
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_one_write_two.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_two_write_one.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double1.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double2.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double3.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double4.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_one.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_two.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow1.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow2.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow3.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow4.erl29
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow5.erl12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow6.erl12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_no_race.erl24
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_race.erl35
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_nested.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_pathsens.erl32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_twice.erl30
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_nested.erl20
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_pathsens.erl29
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_twice.erl27
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_nested.erl21
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_pathsens.erl29
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions4.erl32
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions5.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions6.erl29
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules1.erl16
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules2.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules1_pathsens.erl26
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules2_pathsens.erl12
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules1_rec.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules2_rec.erl8
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules3.erl8
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules4.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules3_pathsens.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules4_pathsens.erl13
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules3_rec.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules4_rec.erl8
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules5.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules6.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules1_nested.erl14
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules2_nested.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules3_nested.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules1_twice.erl21
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules2_twice.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_no_race.erl13
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_race.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module1.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module2.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module3.erl16
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module4.erl14
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module5.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module6.erl14
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module7.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module8.erl13
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module10.erl16
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module9.erl11
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module11.erl27
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module12.erl14
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module13.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module14.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module15.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module16.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param.erl16
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module1.erl9
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module2.erl13
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function1.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function2.erl24
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function3.erl27
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function4.erl27
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function5.erl21
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function6.erl24
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function7.erl19
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function8.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_try_catch.erl25
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars1.erl17
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars10.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars11.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars12.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars13.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars14.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars15.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars16.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars17.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars18.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars19.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars2.erl18
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars20.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars21.erl23
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars22.erl27
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars3.erl18
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars4.erl18
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars5.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars6.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars7.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars8.erl22
-rw-r--r--lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars9.erl22
-rwxr-xr-xlib/dialyzer/test/remake9
-rw-r--r--lib/dialyzer/test/small_tests_SUITE.erl483
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/andalso_test0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/app_call3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/appmon_place0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/areq2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/atom_call3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/atom_widen3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/bs_fail_constr9
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/bs_utf80
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/cerl_hipeify4
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/comm_layer2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/compare14
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/confusing_warning2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/contract13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/contract22
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/contract33
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/contract52
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/eqeq2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/ets_select0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/exhaust_case3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/failing_guard14
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/flatten2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/fun_app7
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/fun_ref_match2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/gencall4
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/gs_make0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/inf_loop24
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/letrec10
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/list_match2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/lzip0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/make_tuple3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/minus_minus0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/mod_info0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/my_filter0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/my_sofs3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/no_match4
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun20
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/non_existing2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/not_guard_crash0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/or_bug0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug20
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/overloaded13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/port_info_test6
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/process_info_test0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/pubsub0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/receive12
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/record_construct7
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/record_pat2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/record_send_test2
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/record_test3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types10
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types20
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types30
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types40
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types50
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types60
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types70
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/toth0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/trec7
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/try10
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/tuple15
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/unsafe_beamcode_bug0
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/unused_cases4
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/unused_clauses3
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/results/zero_tuple5
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/app_call.erl17
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/appmon_place.erl71
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/areq.erl12
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/atom_call.erl14
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/atom_guard.erl9
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/atom_widen.erl24
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/bs_fail_constr.erl16
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/bs_utf8.erl27
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/cerl_hipeify.erl684
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_acceptor.erl120
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_connection.erl206
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.erl83
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.hrl30
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_logger.erl143
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port.erl240
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port_sup.erl90
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/compare1.erl21
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/confusing_warning.erl22
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/contract2.erl18
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/contract3.erl34
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/contract5.erl15
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/disj_norm_form.erl23
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/eqeq.erl16
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/ets_select.erl12
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/exhaust_case.erl24
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/failing_guard1.erl16
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/flatten.erl18
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/fun_app.erl42
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_match.erl21
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_record.erl17
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/gencall.erl12
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/gs_make.erl261
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/inf_loop2.erl23
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/letrec1.erl13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/list_match.erl20
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/lzip.erl8
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/make_tuple.erl5
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/minus_minus.erl8
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/mod_info.erl5
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/my_filter.erl17
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/my_sofs.erl83
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/no_match.erl9
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun.erl20
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun2.erl20
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/non_existing.erl13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/not_guard_crash.erl49
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/or_bug.erl24
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug.erl17
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug2.erl23
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/overloaded1.erl31
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/port_info_test.erl34
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/process_info_test.erl21
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_api.erl99
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_publish.erl50
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/receive1.erl17
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/record_construct.erl22
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/record_pat.erl19
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/record_send_test.erl33
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/record_test.erl24
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types1.erl10
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types2.erl12
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types3.erl15
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types4.erl13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types5.erl13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types6.erl17
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types7.erl13
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/refine_bug1.erl11
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/toth.erl99
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/trec.erl37
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/try1.erl27
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/tuple1.erl29
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/unsafe_beamcode_bug.erl15
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/unused_cases.erl41
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/unused_clauses.erl18
-rw-r--r--lib/dialyzer/test/small_tests_SUITE_data/src/zero_tuple.erl13
-rw-r--r--lib/dialyzer/test/user_tests_SUITE.erl78
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/results/broken_dialyzer0
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/results/gcpFlowControl2
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/results/qlc_error0
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/results/spvcOrig193
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/results/wsp_pdu25
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/broken_dialyzer.erl130
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/gcp.hrl166
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/gcpFlowControl.erl397
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/qlc_error.erl15
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/spvcOrig.erl3523
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/wdp.hrl97
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/wsp.hrl242
-rw-r--r--lib/dialyzer/test/user_tests_SUITE_data/src/wsp_pdu.erl5423
647 files changed, 135356 insertions, 3 deletions
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 86f1ba4696..8d61216b7a 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -189,6 +189,12 @@ init_opts_for_remove(Opts) ->
plt_common(#options{init_plts = [InitPlt]} = Opts, RemoveFiles, AddFiles) ->
case check_plt(Opts, RemoveFiles, AddFiles) of
ok ->
+ case Opts#options.output_plt of
+ none -> ok;
+ OutPlt ->
+ {ok, Binary} = file:read_file(InitPlt),
+ file:write_file(OutPlt, Binary)
+ end,
case Opts#options.report_mode of
quiet -> ok;
_ -> io:put_chars(" yes\n")
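With this change, a successful PLT check (e.g. via --check_plt) still honours a requested
output PLT: the checked PLT is copied verbatim to the output path. A minimal sketch of the
same copy step, assuming readable and writable paths (error handling is omitted, as in the
patch; file:copy/2 would be an equivalent one-liner):

    %% Copy an existing PLT to the requested output location.
    copy_plt(InitPlt, OutPlt) ->
        {ok, Binary} = file:read_file(InitPlt),
        ok = file:write_file(OutPlt, Binary).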
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index f9baf36822..b68d6d190e 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -22,7 +22,7 @@
%% Avoid warning for local function error/1 clashing with autoimported BIF.
-compile({no_auto_import,[error/1]}).
--export([start/0]).
+-export([start/0, get_lib_dir/1]).
-export([collect_args/1]). % used also by typer_options.erl
-include("dialyzer.hrl").
@@ -55,7 +55,7 @@ cl(["--add_to_plt"|T]) ->
put(dialyzer_options_analysis_type, plt_add),
cl(T);
cl(["--apps"|T]) ->
- T1 = get_lib_dir(T, []),
+ T1 = get_lib_dir(T),
{Args, T2} = collect_args(T1),
append_var(dialyzer_options_files_rec, Args),
cl(T2);
@@ -304,6 +304,9 @@ common_options() ->
%%-----------------------------------------------------------------------
+get_lib_dir(Apps) ->
+ get_lib_dir(Apps, []).
+
get_lib_dir([H|T], Acc) ->
NewElem =
case code:lib_dir(list_to_atom(H)) of
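The previously internal get_lib_dir/2 is now reachable through the exported get_lib_dir/1
wrapper, so both the --apps command-line handling shown above and the Erlang interface can
resolve application names to library directories. A minimal sketch of that resolution,
assuming applications that code:lib_dir/1 can find (the real get_lib_dir/2 also handles
names it cannot resolve):

    %% Map application name strings to their lib directories.
    lib_dirs(AppNames) ->
        [code:lib_dir(list_to_atom(Name)) || Name <- AppNames].

    %% lib_dirs(["kernel", "stdlib"]) returns the two library
    %% directories of the running OTP installation.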
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index b5cefd16ca..29e164628a 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -120,12 +120,18 @@ build_options([{OptName, undefined}|Rest], Options) when is_atom(OptName) ->
build_options(Rest, Options);
build_options([{OptionName, Value} = Term|Rest], Options) ->
case OptionName of
+ apps ->
+ OldValues = Options#options.files_rec,
+ AppDirs = get_app_dirs(Value),
+ assert_filenames(Term, AppDirs),
+ build_options(Rest, Options#options{files_rec = AppDirs ++ OldValues});
files ->
assert_filenames(Term, Value),
build_options(Rest, Options#options{files = Value});
files_rec ->
+ OldValues = Options#options.files_rec,
assert_filenames(Term, Value),
- build_options(Rest, Options#options{files_rec = Value});
+ build_options(Rest, Options#options{files_rec = Value ++ OldValues});
analysis_type ->
NewOptions =
case Value of
@@ -191,6 +197,11 @@ build_options([{OptionName, Value} = Term|Rest], Options) ->
build_options([], Options) ->
Options.
+get_app_dirs(Apps) when is_list(Apps) ->
+ dialyzer_cl_parse:get_lib_dir([atom_to_list(A) || A <- Apps]);
+get_app_dirs(Apps) ->
+ bad_option("Use a list of otp applications", Apps).
+
assert_filenames(Term, [FileName|Left]) when length(FileName) >= 0 ->
case filelib:is_file(FileName) orelse filelib:is_dir(FileName) of
true -> ok;
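Together with get_lib_dir/1, the new apps option gives the Erlang interface the same
application shorthand that --apps provides on the command line: each application atom is
expanded to its library directory and appended to files_rec. A hedged sketch of how it
might be used, for instance to build a small PLT as the new test suites do; the
surrounding options (analysis_type, output_plt) are regular dialyzer:run/1 options, and
the exact combination here is an assumption rather than code from the patch:

    %% Build a PLT covering only kernel and stdlib (sketch).
    build_small_plt() ->
        dialyzer:run([{analysis_type, plt_build},
                      {apps, [kernel, stdlib]},
                      {output_plt, "small.plt"}]).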
diff --git a/lib/dialyzer/test/Makefile b/lib/dialyzer/test/Makefile
new file mode 100644
index 0000000000..a8549278a5
--- /dev/null
+++ b/lib/dialyzer/test/Makefile
@@ -0,0 +1,75 @@
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+
+MODULES= \
+ callgraph_tests_SUITE \
+ opaque_tests_SUITE \
+ options1_tests_SUITE \
+ options2_tests_SUITE \
+ plt_tests_SUITE \
+ r9c_tests_SUITE \
+ race_tests_SUITE \
+ small_tests_SUITE \
+ user_tests_SUITE \
+ dialyzer_common\
+ file_utils
+
+ERL_FILES= $(MODULES:%=%.erl)
+
+TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+INSTALL_PROGS= $(TARGET_FILES)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/dialyzer_test
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+
+ERL_MAKE_FLAGS +=
+ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/test_server/include
+
+EBIN = .
+
+EMAKEFILE=Emakefile
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+make_emakefile:
+ $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES) \
+ > $(EMAKEFILE)
+
+tests debug opt: make_emakefile
+ erl $(ERL_MAKE_FLAGS) -make
+
+clean:
+ rm -f $(EMAKEFILE)
+ rm -f $(TARGET_FILES) $(GEN_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+
+release_tests_spec: make_emakefile
+ $(INSTALL_DIR) $(RELSYSDIR)
+ $(INSTALL_DATA) $(EMAKEFILE) $(ERL_FILES) $(RELSYSDIR)
+ $(INSTALL_DATA) dialyzer.spec dialyzer_test_constants.hrl $(RELSYSDIR)
+ chmod -f -R u+w $(RELSYSDIR)
+ @tar cf - *_SUITE_data | (cd $(RELSYSDIR); tar xf -)
+
+release_docs_spec:
diff --git a/lib/dialyzer/test/README b/lib/dialyzer/test/README
new file mode 100644
index 0000000000..07340c7266
--- /dev/null
+++ b/lib/dialyzer/test/README
@@ -0,0 +1,44 @@
+-------------------------------
+To add test cases in any suite:
+-------------------------------
+
+ 1) If the test requires dialyzer to analyze a single file, place it in the
+    suite's 'src' directory. If the analysis needs more than one file, place
+    them all in a new directory inside the suite's 'src' directory.
+
+ 2) Create a file with the same name as the test (the file name without its
+    extension for a single file, the directory name otherwise) containing the
+    expected result in the suite's 'results' directory.
+
+ 3) Run './remake <suite>', where <suite> is the suite's name omitting
+ "_tests_SUITE".
+
+----------------------
+To create a new suite:
+----------------------
+
+ 1) Create a directory whose name ends with the suffix '_tests_SUITE_data';
+    the part before the suffix names the suite and should describe it.
+
+ 2) In the suite's directory create the subdirectories 'src' and 'results' as
+    well as a 'dialyzer_options' file with the following content:
+
+      {dialyzer_options, List}.
+      {time_limit, Limit}.
+
+    where:
+
+      List = a list of dialyzer options. The common case is something like
+             [{warnings, Warnings}], where Warnings is a list of valid '-W'
+             prefixed dialyzer options with the '-W' prefix dropped (e.g.
+             '-Wfoo' would be declared as [{warnings, [foo]}]).
+      Limit = the time limit, in minutes, that each test case is allowed to
+              run. It must be larger than the time the most time-consuming
+              test case needs to finish.
+
+    Either line may be omitted; the defaults are an empty options list and a
+    1-minute time limit. (A complete example file is shown after this list.)
+
+ 3) Add tests as described in the previous section.
+
+ 4) Add the resulting suite's module name to the Makefile's MODULES variable.
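
As a concrete example, the 'dialyzer_options' file of the opaque suite added
later in this patch contains a single line; the commented-out time_limit line
below is only illustrative (it is omitted in the real file, so the 1-minute
default applies):

    {dialyzer_options, [{warnings, [no_unused, no_return]}]}.
    %% optional, e.g. to allow five minutes per test case:
    %% {time_limit, 5}.
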
diff --git a/lib/dialyzer/test/callgraph_tests_SUITE.erl b/lib/dialyzer/test/callgraph_tests_SUITE.erl
new file mode 100644
index 0000000000..6148adf971
--- /dev/null
+++ b/lib/dialyzer/test/callgraph_tests_SUITE.erl
@@ -0,0 +1,52 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(callgraph_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([callgraph_tests_SUITE_consistency/1, test_missing_functions/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, []}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [callgraph_tests_SUITE_consistency,test_missing_functions].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+callgraph_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+test_missing_functions(Config) ->
+ case dialyze(Config, test_missing_functions) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/callgraph_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/callgraph_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..50991c9bc5
--- /dev/null
+++ b/lib/dialyzer/test/callgraph_tests_SUITE_data/dialyzer_options
@@ -0,0 +1 @@
+{dialyzer_options, []}.
diff --git a/lib/dialyzer/test/callgraph_tests_SUITE_data/results/test_missing_functions b/lib/dialyzer/test/callgraph_tests_SUITE_data/results/test_missing_functions
new file mode 100644
index 0000000000..4150bdb7c0
--- /dev/null
+++ b/lib/dialyzer/test/callgraph_tests_SUITE_data/results/test_missing_functions
@@ -0,0 +1,3 @@
+
+t1.erl:16: Call to missing or unexported function t2:t2/1
+t2.erl:13: Call to missing or unexported function t1:t3/1
diff --git a/lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t1.erl b/lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t1.erl
new file mode 100644
index 0000000000..3b320e1ed4
--- /dev/null
+++ b/lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t1.erl
@@ -0,0 +1,16 @@
+%%%-------------------------------------------------------------------
+%%% File : t1.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 26 Jul 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(t1).
+
+-export([t1/1, t2/1]).
+
+t1(X) ->
+ t2:t1(X).
+
+t2(X) ->
+ t2:t2(X).
diff --git a/lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t2.erl b/lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t2.erl
new file mode 100644
index 0000000000..5ac8aa328c
--- /dev/null
+++ b/lib/dialyzer/test/callgraph_tests_SUITE_data/src/test_missing_functions/t2.erl
@@ -0,0 +1,16 @@
+%%%-------------------------------------------------------------------
+%%% File : t2.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 26 Jul 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(t2).
+
+-export([t1/1]).
+
+t1(X) ->
+ t1:t3(X) + t2(X).
+
+t2(X) ->
+ X + 1.
diff --git a/lib/dialyzer/test/dialyzer.spec b/lib/dialyzer/test/dialyzer.spec
new file mode 100644
index 0000000000..7499dbad1e
--- /dev/null
+++ b/lib/dialyzer/test/dialyzer.spec
@@ -0,0 +1,5 @@
+{alias, tests, "../dialyzer_test"}.
+
+{suites, tests, all}.
+
+{skip_cases, tests, small_tests_SUITE, cerl_hipeify, "Needs compiler in plt"}. \ No newline at end of file
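
These are ordinary common_test suites, so besides the OTP test infrastructure
they can be driven directly through common_test using this spec file. A minimal
sketch, assuming an Erlang shell started where the "../dialyzer_test" alias
above resolves (as in a released tests tree):

    %% Runs every suite listed in dialyzer.spec, honouring the skip_cases entry.
    ct:run_test([{spec, "dialyzer.spec"}]).
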
diff --git a/lib/dialyzer/test/dialyzer_common.erl b/lib/dialyzer/test/dialyzer_common.erl
new file mode 100644
index 0000000000..5577405483
--- /dev/null
+++ b/lib/dialyzer/test/dialyzer_common.erl
@@ -0,0 +1,377 @@
+%%% File : dialyzer_common.erl
+%%% Author : Stavros Aronis <[email protected]>
+%%% Description : Generator and common infrastructure for simple dialyzer
+%%% test suites (some options, some input files or directories
+%%% and the relevant results).
+%%% Created : 11 Jun 2010 by Stavros Aronis <stavros@enjoy>
+
+-module(dialyzer_common).
+
+-export([check_plt/1, check/4, create_suite/1,
+ create_all_suites/0, new_tests/2]).
+
+-include_lib("kernel/include/file.hrl").
+
+-define(suite_suffix, "_tests_SUITE").
+-define(data_folder, "_data").
+-define(erlang_extension, ".erl").
+-define(output_file_mode, write).
+-define(dialyzer_option_file, "dialyzer_options").
+-define(input_files_directory, "src").
+-define(result_files_directory, "results").
+-define(plt_filename,"dialyzer_plt").
+-define(home_plt_filename,".dialyzer_plt").
+-define(plt_lockfile,"plt_lock").
+-define(required_modules, [erts, kernel, stdlib]).
+
+-record(suite, {suitename :: string(),
+ outputfile :: file:io_device(),
+ options :: options(),
+ testcases :: [testcase()]}).
+
+-record(options, {time_limit = 1 :: integer(),
+ dialyzer_options = [] :: dialyzer:dial_options()}).
+
+-type options() :: #options{}.
+-type testcase() :: {atom(), 'file' | 'dir'}.
+
+-spec check_plt(string()) -> ok.
+
+check_plt(OutDir) ->
+ io:format("Checking plt:"),
+ PltFilename = filename:join(OutDir, ?plt_filename),
+ case file:read_file_info(PltFilename) of
+ {ok, _} -> dialyzer_check_plt(PltFilename);
+ {error, _ } ->
+ io:format("No plt found in test run directory!"),
+ PltLockFile = filename:join(OutDir, ?plt_lockfile),
+ case file:read_file_info(PltLockFile) of
+ {ok, _} ->
+ explain_fail_with_lock(),
+ fail;
+ {error, _} ->
+ io:format("Locking plt generation."),
+ case file:open(PltLockFile,[?output_file_mode]) of
+ {ok, OutFile} ->
+ io:format(OutFile,"Locking plt generation.",[]),
+ file:close(OutFile);
+ {error, Reason} ->
+ io:format("Couldn't write lock file ~p.",[Reason]),
+ fail
+ end,
+ obtain_plt(PltFilename)
+ end
+ end.
+
+dialyzer_check_plt(PltFilename) ->
+ try dialyzer:run([{analysis_type, plt_check},
+ {init_plt, PltFilename}]) of
+ [] -> ok
+ catch
+ Class:Info ->
+ io:format("Failed. The error was: ~w\n~p",[Class, Info]),
+ io:format("A previously run dialyzer suite failed to generate"
+ " a correct plt."),
+ fail
+ end.
+
+explain_fail_with_lock() ->
+ io:format("Some other suite started creating a plt. It might not have"
+ " finished (Dialyzer's suites shouldn't run in parallel), or"
+ " it reached timeout and was killed (in which case"
+ " plt_timeout, defined in dialyzer_test_constants.hrl"
+ " should be increased), or it failed.").
+
+obtain_plt(PltFilename) ->
+ io:format("Obtaining plt:"),
+ HomeDir = os:getenv("HOME"),
+ HomePlt = filename:join(HomeDir, ?home_plt_filename),
+ io:format("Will try to use ~s as a starting point and add otp apps ~w.",
+ [HomePlt, ?required_modules]),
+ try dialyzer:run([{analysis_type, plt_add},
+ {apps, ?required_modules},
+ {output_plt, PltFilename},
+ {init_plt, HomePlt}]) of
+ [] ->
+ io:format("Successfully added everything!"),
+ ok
+ catch
+ Class:Reason ->
+ io:format("Failed. The error was: ~w\n~p",[Class, Reason]),
+ build_plt(PltFilename)
+ end.
+
+build_plt(PltFilename) ->
+ io:format("Building plt from scratch:"),
+ try dialyzer:run([{analysis_type, plt_build},
+ {apps, ?required_modules},
+ {output_plt, PltFilename}]) of
+ [] ->
+ io:format("Successfully created plt!"),
+ ok
+ catch
+ Class:Reason ->
+ io:format("Failed. The error was: ~w\n~p",[Class, Reason]),
+ fail
+ end.
+
+-spec check(atom(), dialyzer:dial_options(), string(), string()) ->
+ 'same' | {differ, [term()]}.
+
+check(TestCase, Opts, Dir, OutDir) ->
+ PltFilename = filename:join(OutDir, ?plt_filename),
+ SrcDir = filename:join(Dir, ?input_files_directory),
+ ResDir = filename:join(Dir, ?result_files_directory),
+ Filename = filename:join(SrcDir, atom_to_list(TestCase)),
+ Files =
+ case file_utils:file_type(Filename) of
+ {ok, 'directory'} ->
+ {ok, ListFiles} = file_utils:list_dir(Filename, ".erl",
+ false),
+ ListFiles;
+ {error, _} ->
+ FilenameErl = Filename ++ ".erl",
+ case file_utils:file_type(FilenameErl) of
+ {ok, 'regular'} -> [FilenameErl]
+ end
+ end,
+ ResFile = atom_to_list(TestCase),
+ NewResFile = filename:join(OutDir, ResFile),
+ OldResFile = filename:join(ResDir, ResFile),
+ ProperOpts = fix_options(Opts, Dir),
+ try dialyzer:run([{files, Files},{from, src_code},{init_plt, PltFilename},
+ {check_plt, false}|ProperOpts]) of
+ RawWarns ->
+ Warns = lists:sort([dialyzer:format_warning(W) || W <- RawWarns]),
+ case Warns of
+ [] -> ok;
+ _ ->
+ case file:open(NewResFile,[?output_file_mode]) of
+ {ok, OutFile} ->
+ io:format(OutFile,"\n~s",[Warns]),
+ file:close(OutFile);
+ Other -> erlang:error(Other)
+ end
+ end,
+ case file_utils:diff(NewResFile, OldResFile) of
+ 'same' -> file:delete(NewResFile),
+ 'same';
+ Any -> escape_strings(Any)
+ end
+ catch
+ Kind:Error -> {'dialyzer crashed', Kind, Error}
+ end.
+
+fix_options(Opts, Dir) ->
+ fix_options(Opts, Dir, []).
+
+fix_options([], _Dir, Acc) ->
+ Acc;
+fix_options([{pa, Path} | Rest], Dir, Acc) ->
+ case code:add_patha(filename:join(Dir, Path)) of
+ true -> fix_options(Rest, Dir, Acc);
+ {error, _} -> erlang:error("Bad directory for pa: " ++ Path)
+ end;
+fix_options([{DirOption, RelativeDirs} | Rest], Dir, Acc)
+  when DirOption =:= include_dirs ;
+       DirOption =:= files_rec ;
+       DirOption =:= files ->
+    ProperRelativeDirs = [filename:join(Dir,RDir) || RDir <- RelativeDirs],
+    fix_options(Rest, Dir, [{DirOption, ProperRelativeDirs} | Acc]);
+fix_options([Opt | Rest], Dir, Acc) ->
+ fix_options(Rest, Dir, [Opt | Acc]).
+
+-spec new_tests(string(), [atom()]) -> [atom()].
+
+new_tests(Dirname, DeclaredTestcases) ->
+ SrcDir = filename:join(Dirname, ?input_files_directory),
+ get_testcases(SrcDir) -- DeclaredTestcases.
+
+get_testcases(Dirname) ->
+ {ok, Files} = file_utils:list_dir(Dirname, ".erl", true),
+ [list_to_atom(filename:basename(F,".erl")) || F <-Files].
+
+-spec create_all_suites() -> 'ok'.
+
+create_all_suites() ->
+ {ok, Cwd} = file:get_cwd(),
+ Suites = get_suites(Cwd),
+ lists:foreach(fun create_suite/1, Suites).
+
+escape_strings({differ,List}) ->
+ Map = fun({T,L,S}) -> {T,L,xmerl_lib:export_text(S)} end,
+ {differ, lists:keysort(3, lists:map(Map, List))}.
+
+-spec get_suites(file:filename()) -> [string()].
+
+get_suites(Dir) ->
+ case file:list_dir(Dir) of
+ {error, _} -> [];
+ {ok, Filenames} ->
+ FullFilenames = [filename:join(Dir, F) || F <-Filenames ],
+ Dirs = [suffix(filename:basename(F), "_tests_SUITE_data") ||
+ F <- FullFilenames,
+ file_utils:file_type(F) =:= {ok, 'directory'}],
+ [S || {yes, S} <- Dirs]
+ end.
+
+suffix(String, Suffix) ->
+ Index = string:rstr(String, Suffix),
+ case string:substr(String, Index) =:= Suffix of
+ true -> {yes, string:sub_string(String,1,Index-1)};
+ false -> no
+ end.
+
+-spec create_suite(string()) -> 'ok'.
+
+create_suite(SuiteName) ->
+ {ok, Cwd} = file:get_cwd(),
+ SuiteDirN = generate_suite_dir_from_name(Cwd, SuiteName),
+ OutputFile = generate_suite_file(Cwd, SuiteName),
+ {OptionsFileN, InputDirN} = check_neccessary_files(SuiteDirN),
+ generate_suite(SuiteName, OutputFile, OptionsFileN, InputDirN).
+
+generate_suite_dir_from_name(Cwd, SuiteName) ->
+ filename:join(Cwd, SuiteName ++ ?suite_suffix ++ ?data_folder).
+
+generate_suite_file(Cwd, SuiteName) ->
+ OutputFilename =
+ filename:join(Cwd, SuiteName ++ ?suite_suffix ++ ?erlang_extension),
+ case file:open(OutputFilename, [?output_file_mode]) of
+ {ok, IoDevice} -> IoDevice;
+ {error, _} = E -> exit({E, OutputFilename})
+ end.
+
+check_neccessary_files(SuiteDirN) ->
+ InputDirN = filename:join(SuiteDirN, ?input_files_directory),
+ check_file_exists(InputDirN, directory),
+ OptionsFileN = filename:join(SuiteDirN, ?dialyzer_option_file),
+ check_file_exists(OptionsFileN, regular),
+ {OptionsFileN, InputDirN}.
+
+check_file_exists(Filename, Type) ->
+ case file:read_file_info(Filename) of
+ {ok, FileInfo} ->
+ case FileInfo#file_info.type of
+ Type -> ok;
+ Else -> exit({error, {wrong_input_file_type, Else}})
+ end;
+ {error, _} = E -> exit({E, Filename, Type})
+ end.
+
+generate_suite(SuiteName, OutputFile, OptionsFileN, InputDirN) ->
+ Options = read_options(OptionsFileN),
+ TestCases = list_testcases(InputDirN),
+ Suite = #suite{suitename = SuiteName, outputfile = OutputFile,
+ options = Options, testcases = TestCases},
+ write_suite(Suite),
+ file:close(OutputFile).
+
+read_options(OptionsFileN) ->
+ case file:consult(OptionsFileN) of
+ {ok, Opts} -> read_options(Opts, #options{});
+ _ = E -> exit({error, {incorrect_options_file, E}})
+ end.
+
+read_options([List], Options) when is_list(List) ->
+ read_options(List, Options);
+read_options([], Options) ->
+ Options;
+read_options([{time_limit, TimeLimit}|Opts], Options) ->
+ read_options(Opts, Options#options{time_limit = TimeLimit});
+read_options([{dialyzer_options, DialyzerOptions}|Opts], Options) ->
+ read_options(Opts, Options#options{dialyzer_options = DialyzerOptions}).
+
+list_testcases(Dirname) ->
+ {ok, Files} = file_utils:list_dir(Dirname, ".erl", true),
+ [list_to_atom(filename:basename(F,".erl")) || F <-Files].
+
+write_suite(Suite) ->
+ write_header(Suite),
+ write_consistency(Suite),
+ write_testcases(Suite).
+
+write_header(#suite{suitename = SuiteName, outputfile = OutputFile,
+ options = Options, testcases = TestCases}) ->
+ Test_Plus_Consistency =
+ [list_to_atom(SuiteName ++ ?suite_suffix ++ "_consistency")|TestCases],
+ Exports = format_export(Test_Plus_Consistency),
+ TimeLimit = Options#options.time_limit,
+ DialyzerOptions = Options#options.dialyzer_options,
+ io:format(OutputFile,
+ "%% ATTENTION!\n"
+ "%% This is an automatically generated file. Do not edit.\n"
+ "%% Use './remake' script to refresh it if needed.\n"
+ "%% All Dialyzer options should be defined in dialyzer_options\n"
+ "%% file.\n\n"
+ "-module(~s).\n\n"
+ "-include(\"ct.hrl\").\n"
+ "-include(\"dialyzer_test_constants.hrl\").\n\n"
+ "-export([suite/0, init_per_suite/0, init_per_suite/1,\n"
+ " end_per_suite/1, all/0]).\n"
+ "~s\n\n"
+ "suite() ->\n"
+ " [{timetrap, {minutes, ~w}}].\n\n"
+ "init_per_suite() ->\n"
+ " [{timetrap, ?plt_timeout}].\n"
+ "init_per_suite(Config) ->\n"
+ " OutDir = ?config(priv_dir, Config),\n"
+ " case dialyzer_common:check_plt(OutDir) of\n"
+ " fail -> {skip, \"Plt creation/check failed.\"};\n"
+ " ok -> [{dialyzer_options, ~p}|Config]\n"
+ " end.\n\n"
+ "end_per_suite(_Config) ->\n"
+ " ok.\n\n"
+ "all() ->\n"
+ " ~p.\n\n"
+ "dialyze(Config, TestCase) ->\n"
+ " Opts = ?config(dialyzer_options, Config),\n"
+ " Dir = ?config(data_dir, Config),\n"
+ " OutDir = ?config(priv_dir, Config),\n"
+ " dialyzer_common:check(TestCase, Opts, Dir, OutDir)."
+ "\n\n"
+ ,[SuiteName ++ ?suite_suffix, Exports, TimeLimit,
+ DialyzerOptions, Test_Plus_Consistency]).
+
+format_export(TestCases) ->
+ TestCasesArity =
+ [list_to_atom(atom_to_list(N)++"/1") || N <- TestCases],
+ TestCaseString = io_lib:format("-export(~p).", [TestCasesArity]),
+ strip_quotes(lists:flatten(TestCaseString),[]).
+
+strip_quotes([], Result) ->
+ lists:reverse(Result);
+strip_quotes([$' |Rest], Result) ->
+ strip_quotes(Rest, Result);
+strip_quotes([$\, |Rest], Result) ->
+ strip_quotes(Rest, [$\ , $\, |Result]);
+strip_quotes([C|Rest], Result) ->
+ strip_quotes(Rest, [C|Result]).
+
+write_consistency(#suite{suitename = SuiteName, outputfile = OutputFile}) ->
+ write_consistency(SuiteName, OutputFile).
+
+write_consistency(SuiteName, OutputFile) ->
+ io:format(OutputFile,
+ "~s_consistency(Config) ->\n"
+ " Dir = ?config(data_dir, Config),\n"
+ " case dialyzer_common:new_tests(Dir, all()) of\n"
+ " [] -> ok;\n"
+ " New -> ct:fail({missing_tests,New})\n"
+ " end.\n\n",
+ [SuiteName ++ ?suite_suffix]).
+
+write_testcases(#suite{outputfile = OutputFile, testcases = TestCases}) ->
+ write_testcases(OutputFile, TestCases).
+
+write_testcases(OutputFile, [TestCase| Rest]) ->
+ io:format(OutputFile,
+ "~p(Config) ->\n"
+ " case dialyze(Config, ~p) of\n"
+ " 'same' -> 'same';\n"
+ " Error -> ct:fail(Error)\n"
+ " end.\n\n",
+ [TestCase, TestCase]),
+ write_testcases(OutputFile, Rest);
+write_testcases(_OutputFile, []) ->
+ ok.
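
The './remake' script mentioned in the README is not included in this diff;
presumably it reduces to calls like the following sketch, run from
lib/dialyzer/test with the suite name given without the "_tests_SUITE" suffix:

    %% Regenerate callgraph_tests_SUITE.erl from callgraph_tests_SUITE_data/.
    ok = dialyzer_common:create_suite("callgraph").
    %% Or regenerate every *_tests_SUITE.erl found in the current directory.
    ok = dialyzer_common:create_all_suites().
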
diff --git a/lib/dialyzer/test/dialyzer_test_constants.hrl b/lib/dialyzer/test/dialyzer_test_constants.hrl
new file mode 100644
index 0000000000..5672327724
--- /dev/null
+++ b/lib/dialyzer/test/dialyzer_test_constants.hrl
@@ -0,0 +1 @@
+-define(plt_timeout, {hours, 2}).
diff --git a/lib/dialyzer/test/file_utils.erl b/lib/dialyzer/test/file_utils.erl
new file mode 100644
index 0000000000..36b368760c
--- /dev/null
+++ b/lib/dialyzer/test/file_utils.erl
@@ -0,0 +1,155 @@
+-module(file_utils).
+
+-export([list_dir/3, file_type/1, diff/2]).
+
+-include_lib("kernel/include/file.hrl").
+
+-type ext_posix()::posix()|'badarg'.
+-type posix()::atom().
+
+-spec list_dir(file:filename(), string(), boolean()) ->
+ {error, ext_posix()} | {ok, [file:filename()]}.
+
+list_dir(Dir, Extension, Dirs) ->
+ case file:list_dir(Dir) of
+ {error, _} = Error-> Error;
+ {ok, Filenames} ->
+ FullFilenames = [filename:join(Dir, F) || F <-Filenames ],
+ Matches1 = case Dirs of
+ true ->
+ [F || F <- FullFilenames,
+ file_type(F) =:= {ok, 'directory'}];
+ false -> []
+ end,
+ Matches2 = [F || F <- FullFilenames,
+ file_type(F) =:= {ok, 'regular'},
+ filename:extension(F) =:= Extension],
+ {ok, lists:sort(Matches1 ++ Matches2)}
+ end.
+
+-spec file_type(file:filename()) ->
+ {ok, 'device' | 'directory' | 'regular' | 'other'} |
+ {error, ext_posix()}.
+
+file_type(Filename) ->
+ case file:read_file_info(Filename) of
+ {ok, FI} -> {ok, FI#file_info.type};
+ Error -> Error
+ end.
+
+-type diff_result()::'same' | {'differ', diff_list()} |
+ {error, {file:filename(), term()}}.
+-type diff_list()::[{id(), line(), string()}].
+-type id()::'new'|'old'.
+-type line()::non_neg_integer().
+
+-spec diff(file:filename(), file:filename()) -> diff_result().
+
+diff(Filename1, Filename2) ->
+ File1 =
+ case file:open(Filename1, [read]) of
+ {ok, F1} -> {file, F1};
+ _ -> empty
+ end,
+ File2 =
+ case file:open(Filename2, [read]) of
+ {ok, F2} -> {file, F2};
+ _ -> empty
+ end,
+ case diff1(File1, File2) of
+ {error, {N, Error}} ->
+ case N of
+ 1 -> {error, {Filename1, Error}};
+ 2 -> {error, {Filename2, Error}}
+ end;
+ [] -> 'same';
+ DiffList -> {'differ', DiffList}
+ end.
+
+diff1(File1, File2) ->
+ case file_to_lines(File1) of
+ {error, Error} -> {error, {1, Error}};
+ Lines1 ->
+ case file_to_lines(File2) of
+ {error, Error} -> {error, {2, Error}};
+ Lines2 ->
+ Common = lcs_fast(Lines1, Lines2),
+ diff2(Lines1, 1, Lines2, 1, Common, [])
+ end
+ end.
+
+diff2([], _, [], _, [], Acc) -> lists:keysort(2,Acc);
+diff2([H1|T1], N1, [], N2, [], Acc) ->
+ diff2(T1, N1+1, [], N2, [], [{new, N1, H1}|Acc]);
+diff2([], N1, [H2|T2], N2, [], Acc) ->
+ diff2([], N1, T2, N2+1, [], [{old, N2, H2}|Acc]);
+diff2([H1|T1], N1, [H2|T2], N2, [], Acc) ->
+ diff2(T1, N1+1, T2, N2+1, [], [{new, N1, H1}, {old, N2, H2}|Acc]);
+diff2([H1|T1]=L1, N1, [H2|T2]=L2, N2, [HC|TC]=LC, Acc) ->
+ case H1 =:= H2 of
+ true -> diff2(T1, N1+1, T2, N2+1, TC, Acc);
+ false ->
+ case H1 =:= HC of
+ true -> diff2(L1, N1, T2, N2+1, LC, [{old, N2, H2}|Acc]);
+ false -> diff2(T1, N1+1, L2, N2, LC, [{new, N1, H1}|Acc])
+ end
+ end.
+
+-spec lcs_fast([string()], [string()]) -> [string()].
+
+lcs_fast(S1, S2) ->
+ M = length(S1),
+ N = length(S2),
+ Acc = array:new(M*N, {default, 0}),
+ {L, _} = lcs_fast(S1, S2, 1, 1, N, Acc),
+ L.
+
+-spec lcs_fast([string()], [string()],
+ pos_integer(), pos_integer(),
+ non_neg_integer(), array()) -> {[string()], array()}.
+
+lcs_fast([], _, _, _, _, Acc) ->
+ {[], Acc};
+lcs_fast(_, [], _, _, _, Acc) ->
+ {[], Acc};
+lcs_fast([H1|T1] = S1, [H2|T2] = S2, N1, N2, N, Acc) ->
+ I = (N1-1) * N + N2 - 1,
+ case array:get(I, Acc) of
+ 0 ->
+ case string:equal(H1, H2) of
+ true ->
+ {T, NAcc} = lcs_fast(T1, T2, N1+1, N2+1, N, Acc),
+ L = [H1|T],
+ {L, array:set(I, L, NAcc)};
+ false ->
+ {L1, NAcc1} = lcs_fast(S1, T2, N1, N2+1, N, Acc),
+ {L2, NAcc2} = lcs_fast(T1, S2, N1+1, N2, N, NAcc1),
+ L = longest(L1, L2),
+ {L, array:set(I, L, NAcc2)}
+ end;
+ L ->
+ {L, Acc}
+ end.
+
+-spec longest([string()], [string()]) -> [string()].
+
+longest(S1, S2) ->
+ case length(S1) > length(S2) of
+ true -> S1;
+ false -> S2
+ end.
+
+file_to_lines(empty) ->
+ [];
+file_to_lines({file, File}) ->
+ case file_to_lines(File, []) of
+ {error, _} = Error -> Error;
+ Lines -> lists:reverse(Lines)
+ end.
+
+file_to_lines(File, Acc) ->
+ case io:get_line(File, "") of
+ {error, _}=Error -> Error;
+ eof -> Acc;
+ A -> file_to_lines(File, [A|Acc])
+ end.
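
For orientation, a sketch of how a caller such as dialyzer_common:check/4 can
interpret file_utils:diff/2 (the file names are illustrative): 'same' means the
newly produced warnings match the stored result, while {'differ', List} carries
{new | old, LineNumber, Line} tuples for the mismatching lines.

    case file_utils:diff("priv/my_test", "data/results/my_test") of
        'same' ->
            ok;                      % generated warnings match the expected ones
        {'differ', List} ->
            %% e.g. [{new,2,"t.erl:7: ...\n"}, {old,2,"t.erl:8: ...\n"}]
            {failed, List};
        {error, _} = Error ->
            Error                    % one of the files could not be read
    end.
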
diff --git a/lib/dialyzer/test/opaque_tests_SUITE.erl b/lib/dialyzer/test/opaque_tests_SUITE.erl
new file mode 100644
index 0000000000..6b90e7a646
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE.erl
@@ -0,0 +1,184 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(opaque_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([opaque_tests_SUITE_consistency/1, array/1, crash/1, dict/1,
+ ets/1, gb_sets/1, inf_loop1/1, int/1, mixed_opaque/1,
+ my_digraph/1, my_queue/1, opaque/1, queue/1, rec/1, timer/1,
+ union/1, wings/1, zoltan_kis1/1, zoltan_kis2/1, zoltan_kis3/1,
+ zoltan_kis4/1, zoltan_kis5/1, zoltan_kis6/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, [{warnings,[no_unused,no_return]}]}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [opaque_tests_SUITE_consistency,array,crash,dict,ets,gb_sets,inf_loop1,int,
+ mixed_opaque,my_digraph,my_queue,opaque,queue,rec,timer,union,wings,
+ zoltan_kis1,zoltan_kis2,zoltan_kis3,zoltan_kis4,zoltan_kis5,zoltan_kis6].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+opaque_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+array(Config) ->
+ case dialyze(Config, array) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+crash(Config) ->
+ case dialyze(Config, crash) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+dict(Config) ->
+ case dialyze(Config, dict) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets(Config) ->
+ case dialyze(Config, ets) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+gb_sets(Config) ->
+ case dialyze(Config, gb_sets) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+inf_loop1(Config) ->
+ case dialyze(Config, inf_loop1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+int(Config) ->
+ case dialyze(Config, int) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mixed_opaque(Config) ->
+ case dialyze(Config, mixed_opaque) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+my_digraph(Config) ->
+ case dialyze(Config, my_digraph) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+my_queue(Config) ->
+ case dialyze(Config, my_queue) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+opaque(Config) ->
+ case dialyze(Config, opaque) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+queue(Config) ->
+ case dialyze(Config, queue) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+rec(Config) ->
+ case dialyze(Config, rec) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+timer(Config) ->
+ case dialyze(Config, timer) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+union(Config) ->
+ case dialyze(Config, union) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+wings(Config) ->
+ case dialyze(Config, wings) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zoltan_kis1(Config) ->
+ case dialyze(Config, zoltan_kis1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zoltan_kis2(Config) ->
+ case dialyze(Config, zoltan_kis2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zoltan_kis3(Config) ->
+ case dialyze(Config, zoltan_kis3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zoltan_kis4(Config) ->
+ case dialyze(Config, zoltan_kis4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zoltan_kis5(Config) ->
+ case dialyze(Config, zoltan_kis5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zoltan_kis6(Config) ->
+ case dialyze(Config, zoltan_kis6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..3ff26b87db
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/dialyzer_options
@@ -0,0 +1 @@
+{dialyzer_options, [{warnings, [no_unused, no_return]}]}.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/array b/lib/dialyzer/test/opaque_tests_SUITE_data/results/array
new file mode 100644
index 0000000000..b05d088a03
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/array
@@ -0,0 +1,3 @@
+
+array_use.erl:12: The type test is_tuple(array()) breaks the opaqueness of the term array()
+array_use.erl:9: The attempt to match a term of type array() against the pattern {'array', _, _, 'undefined', _} breaks the opaqueness of the term
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/crash b/lib/dialyzer/test/opaque_tests_SUITE_data/results/crash
new file mode 100644
index 0000000000..6bdd934169
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/crash
@@ -0,0 +1,7 @@
+
+crash_1.erl:42: The specification for crash_1:empty/0 states that the function might also return crash_1:targetlist() but the inferred return is none()
+crash_1.erl:45: Record construction #targetlist{list::[]} violates the declared type of field list::'undefined' | crash_1:target()
+crash_1.erl:48: The call crash_1:get_using_branch2(Branch::maybe_improper_list(),L::'undefined' | crash_1:target()) contains an opaque term as 2nd argument when terms of different types are expected in these positions
+crash_1.erl:50: The pattern <_Branch, []> can never match the type <maybe_improper_list(),'undefined' | crash_1:target()>
+crash_1.erl:52: The pattern <Branch, [H = {'target', _, _} | _T]> can never match the type <maybe_improper_list(),'undefined' | crash_1:target()>
+crash_1.erl:54: The pattern <Branch, [{'target', _, _} | T]> can never match the type <maybe_improper_list(),'undefined' | crash_1:target()>
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/dict b/lib/dialyzer/test/opaque_tests_SUITE_data/results/dict
new file mode 100644
index 0000000000..5c6bf6a927
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/dict
@@ -0,0 +1,15 @@
+
+dict_use.erl:41: The attempt to match a term of type dict() against the pattern 'gazonk' breaks the opaqueness of the term
+dict_use.erl:45: The attempt to match a term of type dict() against the pattern [] breaks the opaqueness of the term
+dict_use.erl:46: The attempt to match a term of type dict() against the pattern 42 breaks the opaqueness of the term
+dict_use.erl:51: The attempt to match a term of type dict() against the pattern [] breaks the opaqueness of the term
+dict_use.erl:52: The attempt to match a term of type dict() against the pattern 42 breaks the opaqueness of the term
+dict_use.erl:58: Attempt to test for equality between a term of type maybe_improper_list() and a term of opaque type dict()
+dict_use.erl:60: Attempt to test for inequality between a term of type atom() and a term of opaque type dict()
+dict_use.erl:64: Guard test length(D::dict()) breaks the opaqueness of its argument
+dict_use.erl:65: Guard test is_atom(D::dict()) breaks the opaqueness of its argument
+dict_use.erl:66: Guard test is_list(D::dict()) breaks the opaqueness of its argument
+dict_use.erl:70: The type test is_list(dict()) breaks the opaqueness of the term dict()
+dict_use.erl:73: The call dict:fetch('foo',[1 | 2 | 3,...]) does not have an opaque term of type dict() as 2nd argument
+dict_use.erl:76: The call dict:merge(Fun::any(),42,[1 | 2,...]) does not have opaque terms as 2nd and 3rd arguments
+dict_use.erl:79: The call dict:store(42,'elli',{'dict',0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}) does not have an opaque term of type dict() as 3rd argument
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/ets b/lib/dialyzer/test/opaque_tests_SUITE_data/results/ets
new file mode 100644
index 0000000000..5498ba1538
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/ets
@@ -0,0 +1,3 @@
+
+ets_use.erl:12: Guard test is_integer(T::atom() | tid()) breaks the opaqueness of its argument
+ets_use.erl:7: Guard test is_integer(T::tid()) breaks the opaqueness of its argument
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/gb_sets b/lib/dialyzer/test/opaque_tests_SUITE_data/results/gb_sets
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/gb_sets
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/inf_loop1 b/lib/dialyzer/test/opaque_tests_SUITE_data/results/inf_loop1
new file mode 100644
index 0000000000..eb8f304905
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/inf_loop1
@@ -0,0 +1,5 @@
+
+inf_loop1.erl:119: The pattern [{_, LNorms}] can never match the type []
+inf_loop1.erl:121: The pattern [{LinksA, LNormA}, {LinksB, LNormB}] can never match the type []
+inf_loop1.erl:129: The pattern [{_, Norm} | _] can never match the type []
+inf_loop1.erl:71: The call gb_trees:get(Edge::any(),Etab::array()) contains an opaque term as 2nd argument when terms of different types are expected in these positions
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/int b/lib/dialyzer/test/opaque_tests_SUITE_data/results/int
new file mode 100644
index 0000000000..3ee4def34b
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/int
@@ -0,0 +1,3 @@
+
+int_adt.erl:28: Invalid type specification for function int_adt:add_f/2. The success typing is (number(),float()) -> number()
+int_adt.erl:32: Invalid type specification for function int_adt:div_f/2. The success typing is (number(),number()) -> float()
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/mixed_opaque b/lib/dialyzer/test/opaque_tests_SUITE_data/results/mixed_opaque
new file mode 100644
index 0000000000..ab850b613e
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/mixed_opaque
@@ -0,0 +1,2 @@
+
+mixed_opaque_use.erl:31: The call mixed_opaque_rec_adt:get_a(Q::mixed_opaque_queue_adt:my_queue()) contains an opaque term as 1st argument when an opaque term of type mixed_opaque_rec_adt:rec() is expected
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/my_digraph b/lib/dialyzer/test/opaque_tests_SUITE_data/results/my_digraph
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/my_digraph
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/my_queue b/lib/dialyzer/test/opaque_tests_SUITE_data/results/my_queue
new file mode 100644
index 0000000000..2860b91084
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/my_queue
@@ -0,0 +1,7 @@
+
+my_queue_use.erl:15: The call my_queue_adt:is_empty([]) does not have an opaque term of type my_queue_adt:my_queue() as 1st argument
+my_queue_use.erl:19: The call my_queue_adt:add(42,Q0::[]) does not have an opaque term of type my_queue_adt:my_queue() as 2nd argument
+my_queue_use.erl:24: The attempt to match a term of type my_queue_adt:my_queue() against the pattern [42 | Q2] breaks the opaqueness of the term
+my_queue_use.erl:30: Attempt to test for equality between a term of type [] and a term of opaque type my_queue_adt:my_queue()
+my_queue_use.erl:34: Cons will produce an improper list since its 2nd argument is my_queue_adt:my_queue()
+my_queue_use.erl:34: The call my_queue_adt:dequeue(nonempty_improper_list(42,my_queue_adt:my_queue())) does not have an opaque term of type my_queue_adt:my_queue() as 1st argument
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/opaque b/lib/dialyzer/test/opaque_tests_SUITE_data/results/opaque
new file mode 100644
index 0000000000..ca76f57b54
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/opaque
@@ -0,0 +1,2 @@
+
+opaque_bug4.erl:20: The attempt to match a term of type opaque_adt:abc() against the pattern 'a' breaks the opaqueness of the term
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/queue b/lib/dialyzer/test/opaque_tests_SUITE_data/results/queue
new file mode 100644
index 0000000000..59ce33f098
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/queue
@@ -0,0 +1,11 @@
+
+queue_use.erl:18: The call queue:is_empty({[],[]}) does not have an opaque term of type queue() as 1st argument
+queue_use.erl:22: The call queue:in(42,Q0::{[],[]}) does not have an opaque term of type queue() as 2nd argument
+queue_use.erl:27: The attempt to match a term of type queue() against the pattern {"*", Q2} breaks the opaqueness of the term
+queue_use.erl:33: Attempt to test for equality between a term of type {[42,...],[]} and a term of opaque type queue()
+queue_use.erl:36: The attempt to match a term of type queue() against the pattern {F, _R} breaks the opaqueness of the term
+queue_use.erl:40: The call queue:out({[42,...],[]}) does not have an opaque term of type queue() as 1st argument
+queue_use.erl:51: The call queue_use:is_in_queue(E::42,DB::#db{p::[],q::queue()}) contains an opaque term as 2nd argument when terms of different types are expected in these positions
+queue_use.erl:56: The attempt to match a term of type #db{p::[],q::queue()} against the pattern {'db', _, {L1, L2}} breaks the opaqueness of queue()
+queue_use.erl:62: The call queue_use:tuple_queue({42,'gazonk'}) does not have a term of type {_,queue()} (with opaque subterms) as 1st argument
+queue_use.erl:65: The call queue:in(F::42,Q::'gazonk') does not have an opaque term of type queue() as 2nd argument
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/rec b/lib/dialyzer/test/opaque_tests_SUITE_data/results/rec
new file mode 100644
index 0000000000..72736b3b3c
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/rec
@@ -0,0 +1,6 @@
+
+rec_use.erl:17: The attempt to match a term of type rec_adt:rec() against the pattern {'rec', _, 42} breaks the opaqueness of the term
+rec_use.erl:18: Guard test tuple_size(R::rec_adt:rec()) breaks the opaqueness of its argument
+rec_use.erl:23: The call rec_adt:get_a(R::tuple()) does not have an opaque term of type rec_adt:rec() as 1st argument
+rec_use.erl:27: Attempt to test for equality between a term of type {'rec','gazonk',42} and a term of opaque type rec_adt:rec()
+rec_use.erl:30: The call erlang:tuple_size(rec_adt:rec()) contains an opaque term as 1st argument when a structured term of type tuple() is expected
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/timer b/lib/dialyzer/test/opaque_tests_SUITE_data/results/timer
new file mode 100644
index 0000000000..e917b76b08
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/timer
@@ -0,0 +1,4 @@
+
+timer_use.erl:16: The pattern 'gazonk' can never match the type {'error',_} | {'ok',timer:tref()}
+timer_use.erl:17: The attempt to match a term of type {'ok',timer:tref()} against the pattern {'ok', 42} breaks the opaqueness of timer:tref()
+timer_use.erl:18: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {Tag, 'gazonk'} breaks the opaqueness of timer:tref()
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/union b/lib/dialyzer/test/opaque_tests_SUITE_data/results/union
new file mode 100644
index 0000000000..98829b424a
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/union
@@ -0,0 +1,5 @@
+
+union_use.erl:12: The attempt to match a term of type union_adt:u() against the pattern 'aaa' breaks the opaqueness of the term
+union_use.erl:16: The type test is_tuple(union_adt:u()) breaks the opaqueness of the term union_adt:u()
+union_use.erl:7: Guard test is_atom(A::union_adt:u()) breaks the opaqueness of its argument
+union_use.erl:8: Guard test is_tuple(T::union_adt:u()) breaks the opaqueness of its argument
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/results/wings b/lib/dialyzer/test/opaque_tests_SUITE_data/results/wings
new file mode 100644
index 0000000000..a9571441f8
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/results/wings
@@ -0,0 +1,11 @@
+
+wings_dissolve.erl:103: Guard test is_list(List::gb_set()) breaks the opaqueness of its argument
+wings_dissolve.erl:19: Guard test is_list(Faces::gb_set()) breaks the opaqueness of its argument
+wings_dissolve.erl:272: Guard test is_list(Faces::gb_set()) breaks the opaqueness of its argument
+wings_dissolve.erl:31: The call gb_sets:is_empty(Faces::[any(),...]) does not have an opaque term of type gb_set() as 1st argument
+wings_edge.erl:205: The pattern <Edge, 'hard', Htab> can never match the type <_,'soft',_>
+wings_edge_cmd.erl:30: The call gb_trees:size(P::gb_set()) contains an opaque term as 1st argument when an opaque term of type gb_tree() is expected
+wings_edge_cmd.erl:32: The pattern [_ | Parts] can never match the type []
+wings_edge_cmd.erl:32: The pattern [{_, P} | _] can never match the type []
+wings_io.erl:30: The attempt to match a term of type {'empty',queue()} against the pattern {'empty', {In, Out}} breaks the opaqueness of queue()
+wings_we.erl:155: The call wings_util:gb_trees_largest_key(Etab::gb_tree()) contains an opaque term as 1st argument when a structured term of type {_,{_,_,_,'nil' | {_,_,_,'nil' | {_,_,_,_}}}} is expected
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/array/array_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/array/array_use.erl
new file mode 100644
index 0000000000..1702dc8f03
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/array/array_use.erl
@@ -0,0 +1,15 @@
+-module(array_use).
+
+-export([ok1/0, wrong1/0, wrong2/0]).
+
+ok1() ->
+ array:set(17, gazonk, array:new()).
+
+wrong1() ->
+ {array, _, _, undefined, _} = array:new(42).
+
+wrong2() ->
+ case is_tuple(array:new(42)) of
+ true -> structure_is_exposed;
+ false -> cannot_possibly_be
+ end.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/crash/crash_1.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/crash/crash_1.erl
new file mode 100644
index 0000000000..eebeed15af
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/crash/crash_1.erl
@@ -0,0 +1,55 @@
+%%%-------------------------------------------------------------------
+%%% From : Fredrik Thulin <[email protected]>
+%%%
+%%% A module with an erroneous record field declaration which mixes up
+%%% structured and opaque terms and causes a crash in dialyzer.
+%%%
+%%% In addition, it revealed that the compiler produced extraneous
+%%% warnings about unused record definitions when in fact they are
+%%% needed for type declarations. This is now fixed.
+%%%-------------------------------------------------------------------
+-module(crash_1).
+
+-export([add/3, empty/0]).
+
+%%--------------------------------------------------------------------
+
+-record(sipurl, {proto = "sip" :: string(), host :: string()}).
+-record(keylist, {list = [] :: [_]}).
+-type sip_headers() :: #keylist{}.
+-record(request, {uri :: #sipurl{}, header :: sip_headers()}).
+-type sip_request() :: #request{}.
+
+%%--------------------------------------------------------------------
+
+-record(target, {branch :: string(), request :: sip_request()}).
+-opaque target() :: #target{}.
+
+-record(targetlist, {list :: target()}). % XXX: THIS ONE SHOULD READ [target()]
+-opaque targetlist() :: #targetlist{}.
+
+%%====================================================================
+
+add(Branch, #request{} = Request, #targetlist{list = L} = TargetList) ->
+ case get_using_branch(Branch, TargetList) of
+ none ->
+ NewTarget = #target{branch = Branch, request = Request},
+ #targetlist{list = L ++ [NewTarget]};
+ #target{} ->
+ TargetList
+ end.
+
+-spec empty() -> targetlist().
+
+empty() ->
+ #targetlist{list = []}.
+
+get_using_branch(Branch, #targetlist{list = L}) when is_list(Branch) ->
+ get_using_branch2(Branch, L).
+
+get_using_branch2(_Branch, []) ->
+ none;
+get_using_branch2(Branch, [#target{branch=Branch}=H | _T]) ->
+ H;
+get_using_branch2(Branch, [#target{} | T]) ->
+ get_using_branch2(Branch, T).
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/dict/dict_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/dict/dict_use.erl
new file mode 100644
index 0000000000..2a632a910d
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/dict/dict_use.erl
@@ -0,0 +1,83 @@
+-module(dict_use).
+
+-export([ok1/0, ok2/0, ok3/0, ok4/0, ok5/0, ok6/0]).
+-export([middle/0]).
+-export([w1/0, w2/0, w3/0, w4/1, w5/0, w6/0, w7/0, w8/1, w9/0]).
+
+-define(DICT, dict).
+
+%%---------------------------------------------------------------------
+%% Cases that are OK
+%%---------------------------------------------------------------------
+
+ok1() ->
+ dict:new().
+
+ok2() ->
+ case dict:new() of X -> X end.
+
+ok3() ->
+ Dict1 = dict:new(),
+ Dict2 = dict:new(),
+ Dict1 =:= Dict2.
+
+ok4() ->
+ dict:fetch(foo, dict:new()).
+
+ok5() -> % this is OK since some_mod:new/0 might be returning a dict()
+ dict:fetch(foo, some_mod:new()).
+
+ok6() ->
+ dict:store(42, elli, dict:new()).
+
+middle() ->
+ {w1(), w2()}.
+
+%%---------------------------------------------------------------------
+%% Cases that are problematic w.r.t. opaqueness of types
+%%---------------------------------------------------------------------
+
+w1() ->
+ gazonk = dict:new().
+
+w2() ->
+ case dict:new() of
+ [] -> nil;
+ 42 -> weird
+ end.
+
+w3() ->
+ try dict:new() of
+ [] -> nil;
+ 42 -> weird
+ catch
+ _:_ -> exception
+ end.
+
+w4(Dict) when is_list(Dict) ->
+ Dict =:= dict:new();
+w4(Dict) when is_atom(Dict) ->
+ Dict =/= dict:new().
+
+w5() ->
+ case dict:new() of
+ D when length(D) =/= 42 -> weird;
+ D when is_atom(D) -> weirder;
+ D when is_list(D) -> gazonk
+ end.
+
+w6() ->
+ is_list(dict:new()).
+
+w7() ->
+ dict:fetch(foo, [1,2,3]).
+
+w8(Fun) ->
+ dict:merge(Fun, 42, [1,2]).
+
+w9() ->
+ dict:store(42, elli,
+ {dict,0,16,16,8,80,48,
+ {[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
+ {{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}).
+
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/ets/ets_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/ets/ets_use.erl
new file mode 100644
index 0000000000..20be9803eb
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/ets/ets_use.erl
@@ -0,0 +1,17 @@
+-module(ets_use).
+-export([t1/0, t2/0]).
+
+t1() ->
+ case n() of
+ T when is_atom(T) -> atm;
+ T when is_integer(T) -> int
+ end.
+
+t2() ->
+ case n() of
+ T when is_integer(T) -> int;
+ T when is_atom(T) -> atm
+ end.
+
+n() -> ets:new(n, [named_table]).
+
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/gb_sets/gb_sets_rec.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/gb_sets/gb_sets_rec.erl
new file mode 100644
index 0000000000..008b0a486a
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/gb_sets/gb_sets_rec.erl
@@ -0,0 +1,23 @@
+%%---------------------------------------------------------------------
+%% This module does not test gb_sets. Instead it tests that we can
+%% create records whose fields are declared with an opaque type and
+%% retrieve these fields without problems. Uninitialized record fields
+%% used to cause trouble for the analysis due to the implicit
+%% 'undefined' value that record fields contain. The problem was the
+%% strange interaction of ?opaque() and ?union() in the definition of
+%% erl_types:t_inf/3. This was fixed 18/1/2009.
+%% --------------------------------------------------------------------
+
+-module(gb_sets_rec).
+
+-export([new/0, get_g/1]).
+
+-record(rec, {g :: gb_set()}).
+
+-spec new() -> #rec{}.
+new() ->
+ #rec{g = gb_sets:empty()}.
+
+-spec get_g(#rec{}) -> gb_set().
+get_g(R) ->
+ R#rec.g.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/inf_loop1.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/inf_loop1.erl
new file mode 100644
index 0000000000..0dff16cf14
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/inf_loop1.erl
@@ -0,0 +1,172 @@
+%% -*- erlang-indent-level: 2 -*-
+%%----------------------------------------------------------------------------
+%% Non-sensical (i.e., stripped-down) program that sends the analysis
+%% into an infinite loop. The #we.es field was originally a gb_tree()
+%% but the programmer declared it as an array in order to change it to
+%% that data type instead. In the file, there are two calls to function
+%% gb_trees:get/2 which seem to be the ones responsible for sending the
+%% analysis into an infinite loop. Currently, these calls are marked and
+%% have been changed to gbee_trees:get/2 in order to be able to see that
+%% the analysis works if these two calls are taken out of the picture.
+%%----------------------------------------------------------------------------
+-module(inf_loop1).
+
+-export([command/1]).
+
+-record(we, {id,
+ es = array:new() :: array(),
+ vp,
+ mirror = none}).
+-record(edge, {vs,ve,a = none,b = none,lf,rf,ltpr,ltsu,rtpr,rtsu}).
+
+command(St) ->
+ State = drag_mode(offset_region),
+ SetupSt = wings_sel_conv:more(St),
+ Tvs = wings_sel:fold(fun(Faces, #we{id = Id} = We, Acc) ->
+ FaceRegions = wings_sel:face_regions(Faces, We),
+ {AllVs0,VsData} =
+ collect_offset_regions_data(FaceRegions, We, [], []),
+ AllVs = ordsets:from_list(AllVs0),
+ [{Id,{AllVs,offset_regions_fun(VsData, State)}}|Acc]
+ end,
+ [],
+ SetupSt),
+ wings_drag:setup(Tvs, 42, [], St).
+
+drag_mode(Type) ->
+ {Mode,Norm} = wings_pref:get_value(Type, {average,loop}),
+ {Type,Mode,Norm}.
+
+collect_offset_regions_data([Faces|Regions], We, AllVs, VsData) ->
+ {FaceNormTab,OuterEdges,RegVs} =
+ some_fake_module:faces_data_0(Faces, We, [], [], []),
+ {LoopNorm,LoopVsData,LoopVs} =
+ offset_regions_loop_data(OuterEdges, Faces, We, FaceNormTab),
+ Vs = RegVs -- LoopVs,
+ RegVsData = vertex_normals(Vs, FaceNormTab, We, LoopVsData),
+ collect_offset_regions_data(Regions, We, RegVs ++ AllVs,
+ [{LoopNorm,RegVsData}|VsData]);
+collect_offset_regions_data([], _, AllVs, VsData) ->
+ {AllVs,VsData}.
+
+offset_regions_loop_data(Edges, Faces, We, FNtab) ->
+ EdgeSet = gb_sets:from_list(Edges),
+ offset_loop_data_0(EdgeSet, Faces, We, FNtab, [], [], []).
+
+offset_loop_data_0(EdgeSet0, Faces, We, FNtab, LNorms, VData0, Vs0) ->
+ case gb_sets:is_empty(EdgeSet0) of
+ false ->
+ {Edge,EdgeSet1} = gb_sets:take_smallest(EdgeSet0),
+ {EdgeSet,VData,Links,LoopNorm,Vs} =
+ offset_loop_data_1(Edge, EdgeSet1, Faces, We, FNtab, VData0, Vs0),
+ offset_loop_data_0(EdgeSet, Faces, We, FNtab,
+ [{Links,LoopNorm}|LNorms], VData, Vs);
+ true ->
+ AvgLoopNorm = average_loop_norm(LNorms),
+ {AvgLoopNorm,VData0,Vs0}
+ end.
+
+offset_loop_data_1(Edge, EdgeSet, _Faces,
+ #we{es = Etab, vp = Vtab} = We, FNtab, VData, Vs) ->
+ #edge{vs = Va, ve = Vb, lf = Lf, ltsu = NextLeft} = gb_trees:get(Edge, Etab),
+ VposA = gb_trees:get(Va, Vtab),
+ VposB = gb_trees:get(Vb, Vtab),
+ VDir = e3d_vec:sub(VposB, VposA),
+ FNorm = wings_face:normal(Lf, We),
+ EdgeData = gb_trees:get(NextLeft, Etab),
+ offset_loop_data_2(NextLeft, EdgeData, Va, VposA, Lf, Edge, We, FNtab,
+ EdgeSet, VDir, [], [FNorm], VData, [], Vs, 0).
+
+offset_loop_data_2(CurE, #edge{vs = Va, ve = Vb, lf = PrevFace,
+ rtsu = NextEdge, ltsu = IfCurIsMember},
+ Vb, VposB, PrevFace, LastE,
+ #we{mirror = M} = We,
+ FNtab, EdgeSet0, VDir, EDir0, VNorms0, VData0, VPs0, Vs0,
+ Links) ->
+ Mirror = M == PrevFace,
+ offset_loop_is_member(Mirror, Vb, Va, VposB, CurE, IfCurIsMember, VNorms0,
+ NextEdge, EdgeSet0, VDir, EDir0, FNtab, PrevFace,
+ LastE, We, VData0, VPs0, Vs0, Links).
+
+offset_loop_is_member(Mirror, V1, V2, Vpos1, CurE, NextE, VNorms0, NEdge,
+ EdgeSet0, VDir, EDir0, FNtab, PFace, LastE, We,
+ VData0, VPs0, Vs0, Links) ->
+ #we{es = Etab, vp = Vtab} = We,
+ Vpos2 = gb_trees:get(V2, Vtab),
+ Dir = e3d_vec:sub(Vpos2, Vpos1),
+ NextVDir = e3d_vec:neg(Dir),
+ EdgeSet = gb_sets:delete(CurE, EdgeSet0),
+ EdgeData = gbee_trees:get(NextE, Etab), %% HERE
+ [FNorm|_] = VNorms0,
+ VData = offset_loop_data_3(Mirror, V1, Vpos1, VNorms0, NEdge, VDir,
+ Dir, EDir0, FNtab, We, VData0),
+ VPs = [Vpos1|VPs0],
+ Vs = [V1|Vs0],
+ offset_loop_data_2(NextE, EdgeData, V2, Vpos2, PFace, LastE, We, FNtab,
+ EdgeSet, NextVDir, [], [FNorm], VData, VPs, Vs, Links + 1).
+
+offset_loop_data_3(false, V, Vpos, VNorms0, NextEdge,
+ VDir, Dir, EDir0, FNtab, We, VData0) ->
+ #we{es = Etab} = We,
+ VNorm = e3d_vec:norm(e3d_vec:add(VNorms0)),
+ NV = wings_vertex:other(V, gbee_trees:get(NextEdge, Etab)), %% HERE
+ ANorm = vertex_normal(NV, FNtab, We),
+ EDir = some_fake_module:average_edge_dir(VNorm, VDir, Dir, EDir0),
+ AvgDir = some_fake_module:evaluate_vdata(VDir, Dir, VNorm),
+ ScaledDir = some_fake_module:along_edge_scale_factor(VDir, Dir, EDir, ANorm),
+ [{V,{Vpos,AvgDir,EDir,ScaledDir}}|VData0].
+
+average_loop_norm([{_,LNorms}]) ->
+ e3d_vec:norm(LNorms);
+average_loop_norm([{LinksA,LNormA},{LinksB,LNormB}]) ->
+ case LinksA < LinksB of
+ true ->
+ e3d_vec:norm(e3d_vec:add(e3d_vec:neg(LNormA), LNormB));
+ false ->
+ e3d_vec:norm(e3d_vec:add(e3d_vec:neg(LNormB), LNormA))
+ end;
+average_loop_norm(LNorms) ->
+ LoopNorms = [Norm || {_,Norm} <- LNorms],
+ e3d_vec:norm(e3d_vec:neg(e3d_vec:add(LoopNorms))).
+
+vertex_normals([V|Vs], FaceNormTab, #we{vp = Vtab, mirror = M} = We, Acc) ->
+ FaceNorms =
+ wings_vertex:fold(fun(_, Face, _, A) when Face == M ->
+ [e3d_vec:neg(wings_face:normal(M, We))|A];
+ (_, Face, _, A) ->
+ [gb_trees:get(Face, FaceNormTab)|A]
+ end, [], V, We),
+ VNorm = e3d_vec:norm(e3d_vec:add(FaceNorms)),
+ Vpos = gb_trees:get(V, Vtab),
+ vertex_normals(Vs, FaceNormTab, We, [{V,{Vpos,VNorm}}|Acc]);
+vertex_normals([], _, _, Acc) ->
+ Acc.
+
+vertex_normal(V, FaceNormTab, #we{mirror = M} = We) ->
+ wings_vertex:fold(fun(_, Face, _, A) when Face == M ->
+ [e3d_vec:neg(wings_face:normal(Face, We))|A];
+ (_, Face, _, A) ->
+ N = gb_trees:get(Face, FaceNormTab),
+ case e3d_vec:is_zero(N) of
+ true -> A;
+ false -> [N|A]
+ end
+ end, [], V, We).
+
+offset_regions_fun(OffsetData, {_,Solution,_} = State) ->
+ fun(new_mode_data, {NewState,_}) ->
+ offset_regions_fun(OffsetData, NewState);
+ ([Dist,_,_,Bump|_], A) ->
+ lists:foldl(fun({LoopNormal,VsData}, VsAcc0) ->
+ lists:foldl(fun({V,{Vpos0,VNorm}}, VsAcc) ->
+ [{V,Vpos0}|VsAcc];
+ ({V,{Vpos0,Dir,EDir,ScaledEDir}}, VsAcc) ->
+ Vec = case Solution of
+ average -> Dir;
+ along_edges -> EDir;
+ scaled -> ScaledEDir
+ end,
+ [{V,Vpos0}|VsAcc]
+ end, VsAcc0, VsData)
+ end, A, OffsetData)
+ end.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_adt.erl
new file mode 100644
index 0000000000..99f8cbdc4a
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_adt.erl
@@ -0,0 +1,33 @@
+%%----------------------------------------------------------------------------
+%% Module that tests consistency of spec declarations in the presence of
+%% opaque types. Contains both valid and invalid contracts with opaque types.
+%%----------------------------------------------------------------------------
+
+-module(int_adt).
+
+-export([new_i/0, add_i/2, div_i/2, add_f/2, div_f/2]).
+
+-export_type([int/0]).
+
+-opaque int() :: integer().
+
+%% the user has declared the return to be an opaque type, but the success
+%% typing inference is too strong and finds a subtype as a return: this is OK
+-spec new_i() -> int().
+new_i() -> 42.
+
+%% the success typing is more general than the contract: this is OK
+-spec add_i(int(), int()) -> int().
+add_i(X, Y) -> X + Y.
+
+%% the success typing coincides with the contract: this is OK, of course
+-spec div_i(int(), int()) -> int().
+div_i(X, Y) -> X div Y.
+
+%% the success typing has an incompatible domain element: this is invalid
+-spec add_f(int(), int()) -> int().
+add_f(X, Y) when is_float(Y) -> X + trunc(Y).
+
+%% the success typing has an incompatible range: this is invalid
+-spec div_f(int(), int()) -> int().
+div_f(X, Y) -> X / Y.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_use.erl
new file mode 100644
index 0000000000..b4471e1cee
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/int/int_use.erl
@@ -0,0 +1,11 @@
+%%---------------------------------------------------------------------------
+%% Module that uses the opaque types of int_adt.
+%% TODO: Should be extended with invalid contracts.
+%%---------------------------------------------------------------------------
+-module(int_use).
+
+-export([test/0]).
+
+-spec test() -> int_adt:int().
+test() ->
+ int_adt:new_i().
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_queue_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_queue_adt.erl
new file mode 100644
index 0000000000..ac59f19cd3
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_queue_adt.erl
@@ -0,0 +1,26 @@
+%%---------------------------------------------------------------------------
+%% A clone of 'queue_adt' so as to test its combination with 'rec_adt'
+%%---------------------------------------------------------------------------
+-module(mixed_opaque_queue_adt).
+
+-export([new/0, add/2, dequeue/1, is_empty/1]).
+
+-opaque my_queue() :: list().
+
+-spec new() -> my_queue().
+new() ->
+ [].
+
+-spec add(term(), my_queue()) -> my_queue().
+add(E, Q) ->
+ Q ++ [E].
+
+-spec dequeue(my_queue()) -> {term(), my_queue()}.
+dequeue([H|T]) ->
+ {H, T}.
+
+-spec is_empty(my_queue()) -> boolean().
+is_empty([]) ->
+ true;
+is_empty([_|_]) ->
+ false.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_rec_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_rec_adt.erl
new file mode 100644
index 0000000000..61bae5110d
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_rec_adt.erl
@@ -0,0 +1,25 @@
+%%---------------------------------------------------------------------------
+%% A clone of 'rec_adt' so as to test its combination with 'queue_adt'
+%%---------------------------------------------------------------------------
+-module(mixed_opaque_rec_adt).
+
+-export([new/0, get_a/1, get_b/1, set_a/2, set_b/2]).
+
+-record(rec, {a :: atom(), b = 0 :: integer()}).
+
+-opaque rec() :: #rec{}.
+
+-spec new() -> rec().
+new() -> #rec{a = gazonk, b = 42}.
+
+-spec get_a(rec()) -> atom().
+get_a(#rec{a = A}) -> A.
+
+-spec get_b(rec()) -> integer().
+get_b(#rec{b = B}) -> B.
+
+-spec set_a(rec(), atom()) -> rec().
+set_a(R, A) -> R#rec{a = A}.
+
+-spec set_b(rec(), integer()) -> rec().
+set_b(R, B) -> R#rec{b = B}.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_use.erl
new file mode 100644
index 0000000000..e82dcd5f38
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/mixed_opaque/mixed_opaque_use.erl
@@ -0,0 +1,31 @@
+%%---------------------------------------------------------------------------
+%% Test that tries some combinations of using more than one opaque data type
+%% in the same function(s).
+%%----------------------------------------------------------------------------
+-module(mixed_opaque_use).
+
+-export([ok1/1, ok2/0, wrong1/0]).
+
+-define(REC, mixed_opaque_rec_adt).
+-define(QUEUE, mixed_opaque_queue_adt).
+
+%% Currently returning unions of opaque types is considered OK
+ok1(Type) ->
+ case Type of
+ queue -> ?QUEUE:new();
+ rec -> ?REC:new()
+ end.
+
+%% Constructing a queue of records is OK
+ok2() ->
+ Q0 = ?QUEUE:new(),
+ R0 = ?REC:new(),
+ Q1 = ?QUEUE:add(R0, Q0),
+ {R1,_Q2} = ?QUEUE:dequeue(Q1),
+ ?REC:get_a(R1).
+
+%% But of course calling a function expecting some opaque type
+%% with a value of some other opaque type is not OK
+wrong1() ->
+ Q = ?QUEUE:new(),
+ ?REC:get_a(Q).
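For contrast with wrong1/0 above, a corrected variant (illustrative only, not part of the patch; the name right1 is made up) would pass each ADT's accessor a value of its own opaque type, as ok2/0 already does. Such a function could be added to mixed_opaque_use.erl:

    right1() ->
        R = mixed_opaque_rec_adt:new(),
        mixed_opaque_rec_adt:get_a(R).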
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_digraph/my_digraph_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_digraph/my_digraph_adt.erl
new file mode 100644
index 0000000000..20c72aa6eb
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_digraph/my_digraph_adt.erl
@@ -0,0 +1,51 @@
+-module(my_digraph_adt).
+
+-export([new/0, new/1]).
+
+-record(my_digraph, {vtab = notable,
+ etab = notable,
+ ntab = notable,
+ cyclic = true :: boolean()}).
+
+-opaque my_digraph() :: #my_digraph{}.
+
+-type d_protection() :: 'private' | 'protected'.
+-type d_cyclicity() :: 'acyclic' | 'cyclic'.
+-type d_type() :: d_cyclicity() | d_protection().
+
+-spec new() -> my_digraph().
+new() -> new([]).
+
+-spec new([atom()]) -> my_digraph().
+new(Type) ->
+ try check_type(Type, protected, []) of
+ {Access, Ts} ->
+ V = ets:new(vertices, [set, Access]),
+ E = ets:new(edges, [set, Access]),
+ N = ets:new(neighbours, [bag, Access]),
+ ets:insert(N, [{'$vid', 0}, {'$eid', 0}]),
+ set_type(Ts, #my_digraph{vtab=V, etab=E, ntab=N})
+ catch
+ throw:Error -> throw(Error)
+ end.
+
+-spec check_type([atom()], d_protection(), [{'cyclic', boolean()}]) ->
+ {d_protection(), [{'cyclic', boolean()}]}.
+
+check_type([acyclic|Ts], A, L) ->
+ check_type(Ts, A,[{cyclic,false} | L]);
+check_type([cyclic | Ts], A, L) ->
+ check_type(Ts, A, [{cyclic,true} | L]);
+check_type([protected | Ts], _, L) ->
+ check_type(Ts, protected, L);
+check_type([private | Ts], _, L) ->
+ check_type(Ts, private, L);
+check_type([T | _], _, _) ->
+ throw({error, {unknown_type, T}});
+check_type([], A, L) -> {A, L}.
+
+-spec set_type([{'cyclic', boolean()}], my_digraph()) -> my_digraph().
+
+set_type([{cyclic,V} | Ks], G) ->
+ set_type(Ks, G#my_digraph{cyclic = V});
+set_type([], G) -> G.
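A brief usage sketch of the constructors above (illustrative only, not part of the patch; new_examples is a made-up name for a function in a scratch module). An unknown type option is thrown as an error tuple, so a bare catch returns that tuple:

    new_examples() ->
        _Default = my_digraph_adt:new(),                   %% defaults: cyclic, protected
        _Acyclic = my_digraph_adt:new([acyclic, private]), %% acyclic, private ETS tables
        {error, {unknown_type, foo}} = (catch my_digraph_adt:new([foo])),
        ok.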
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_adt.erl
new file mode 100644
index 0000000000..52688062ce
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_adt.erl
@@ -0,0 +1,23 @@
+-module(my_queue_adt).
+
+-export([new/0, add/2, dequeue/1, is_empty/1]).
+
+-opaque my_queue() :: list().
+
+-spec new() -> my_queue().
+new() ->
+ [].
+
+-spec add(term(), my_queue()) -> my_queue().
+add(E, Q) ->
+ Q ++ [E].
+
+-spec dequeue(my_queue()) -> {term(), my_queue()}.
+dequeue([H|T]) ->
+ {H, T}.
+
+-spec is_empty(my_queue()) -> boolean().
+is_empty([]) ->
+ true;
+is_empty([_|_]) ->
+ false.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_use.erl
new file mode 100644
index 0000000000..98f9972c1e
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/my_queue/my_queue_use.erl
@@ -0,0 +1,35 @@
+-module(my_queue_use).
+
+-export([ok1/0, ok2/0, wrong1/0, wrong2/0, wrong3/0, wrong4/0, wrong5/0]).
+
+ok1() ->
+ my_queue_adt:is_empty(my_queue_adt:new()).
+
+ok2() ->
+ Q0 = my_queue_adt:new(),
+ Q1 = my_queue_adt:add(42, Q0),
+ {42, Q2} = my_queue_adt:dequeue(Q1),
+ my_queue_adt:is_empty(Q2).
+
+wrong1() ->
+ my_queue_adt:is_empty([]).
+
+wrong2() ->
+ Q0 = [],
+ my_queue_adt:add(42, Q0).
+
+wrong3() ->
+ Q0 = my_queue_adt:new(),
+ Q1 = my_queue_adt:add(42, Q0),
+ [42|Q2] = Q1,
+ Q2.
+
+wrong4() ->
+ Q0 = my_queue_adt:new(),
+ Q1 = my_queue_adt:add(42, Q0),
+ Q1 =:= [].
+
+wrong5() ->
+ Q0 = my_queue_adt:new(),
+ {42, Q2} = my_queue_adt:dequeue([42|Q0]),
+ Q2.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_adt.erl
new file mode 100644
index 0000000000..3456f0e9c6
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_adt.erl
@@ -0,0 +1,9 @@
+-module(opaque_adt).
+-export([atom_or_list/1]).
+
+-opaque abc() :: 'a' | 'b' | 'c'.
+
+atom_or_list(1) -> a;
+atom_or_list(2) -> b;
+atom_or_list(3) -> c;
+atom_or_list(N) -> lists:duplicate(N, a).
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug1.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug1.erl
new file mode 100644
index 0000000000..ff0b1d05ab
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug1.erl
@@ -0,0 +1,17 @@
+%%---------------------------------------------------------------------
+%% A test for which the analysis went into an infinite loop due to
+%% specialization using the structured type instead of the opaque one.
+%%---------------------------------------------------------------------
+
+-module(opaque_bug1).
+
+-export([test/1]).
+
+-record(c, {a::atom()}).
+
+-opaque erl_type() :: 'any' | #c{}.
+
+test(#c{a=foo} = T) -> local(T).
+
+local(#c{a=foo}) -> any.
+
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug2.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug2.erl
new file mode 100644
index 0000000000..f193a58f59
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug2.erl
@@ -0,0 +1,13 @@
+%%---------------------------------------------------------------------
+%% A test for which the analysis gave a bogus warning due to
+%% considering the function call name to be of opaque type...
+%%---------------------------------------------------------------------
+
+-module(opaque_bug2).
+
+-export([test/0]).
+
+-opaque o() :: 'map'.
+
+test() ->
+ lists:map(fun(X) -> X+1 end, [1,2]).
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug3.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug3.erl
new file mode 100644
index 0000000000..71da82a1f6
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug3.erl
@@ -0,0 +1,19 @@
+%%---------------------------------------------------------------------
+%% A test for which the analysis gave wrong results because it did not
+%% handle the is_tuple/1 guard properly.
+%%---------------------------------------------------------------------
+
+-module(opaque_bug3).
+
+-export([test/1]).
+
+-record(c, {}).
+
+-opaque o() :: 'a' | #c{}.
+
+-spec test(o()) -> 42.
+
+test(#c{} = O) -> t(O).
+
+t(T) when is_tuple(T) -> 42;
+t(a) -> gazonk.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug4.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug4.erl
new file mode 100644
index 0000000000..a7ddc80fe8
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/opaque/opaque_bug4.erl
@@ -0,0 +1,21 @@
+%%---------------------------------------------------------------------
+%% A test for which the analysis gave wrong results due to erroneous
+%% specialization and incorrect handling of unions.
+%%---------------------------------------------------------------------
+
+-module(opaque_bug4).
+
+-export([ok/0, wrong/0]).
+
+%-spec ok() -> 'ok'.
+ok() ->
+ L = opaque_adt:atom_or_list(42),
+ foo(L).
+
+%-spec wrong() -> 'not_ok'.
+wrong() ->
+ A = opaque_adt:atom_or_list(1),
+ foo(A).
+
+foo(a) -> not_ok;
+foo([_|_]) -> ok.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/queue/queue_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/queue/queue_use.erl
new file mode 100644
index 0000000000..5682f2281e
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/queue/queue_use.erl
@@ -0,0 +1,66 @@
+-module(queue_use).
+
+-export([ok1/0, ok2/0]).
+-export([wrong1/0, wrong2/0, wrong3/0, wrong4/0, wrong5/0, wrong6/0, wrong7/0, wrong8/0]).
+
+ok1() ->
+ queue:is_empty(queue:new()).
+
+ok2() ->
+ Q0 = queue:new(),
+ Q1 = queue:in(42, Q0),
+ {{value, 42}, Q2} = queue:out(Q1),
+ queue:is_empty(Q2).
+
+%%--------------------------------------------------
+
+wrong1() ->
+ queue:is_empty({[],[]}).
+
+wrong2() ->
+ Q0 = {[],[]},
+ queue:in(42, Q0).
+
+wrong3() ->
+ Q0 = queue:new(),
+ Q1 = queue:in(42, Q0),
+ {[42],Q2} = Q1,
+ Q2.
+
+wrong4() ->
+ Q0 = queue:new(),
+ Q1 = queue:in(42, Q0),
+ Q1 =:= {[42],[]}.
+
+wrong5() ->
+ {F, _R} = queue:new(),
+ F.
+
+wrong6() ->
+ {{value, 42}, Q2} = queue:out({[42],[]}),
+ Q2.
+
+%%--------------------------------------------------
+
+-record(db, {p, q}).
+
+wrong7() ->
+ add_unique(42, #db{p = [], q = queue:new()}).
+
+add_unique(E, DB) ->
+ case is_in_queue(E, DB) of
+ true -> DB;
+ false -> DB#db{q = queue:in(E, DB#db.q)}
+ end.
+
+is_in_queue(P, #db{q = {L1,L2}}) ->
+ lists:member(P, L1) orelse lists:member(P, L2).
+
+%%--------------------------------------------------
+
+wrong8() ->
+ tuple_queue({42, gazonk}).
+
+tuple_queue({F, Q}) ->
+ queue:in(F, Q).
+
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_adt.erl
new file mode 100644
index 0000000000..f01cc5e519
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_adt.erl
@@ -0,0 +1,22 @@
+-module(rec_adt).
+
+-export([new/0, get_a/1, get_b/1, set_a/2, set_b/2]).
+
+-record(rec, {a :: atom(), b = 0 :: integer()}).
+
+-opaque rec() :: #rec{}.
+
+-spec new() -> rec().
+new() -> #rec{a = gazonk, b = 42}.
+
+-spec get_a(rec()) -> atom().
+get_a(#rec{a = A}) -> A.
+
+-spec get_b(rec()) -> integer().
+get_b(#rec{b = B}) -> B.
+
+-spec set_a(rec(), atom()) -> rec().
+set_a(R, A) -> R#rec{a = A}.
+
+-spec set_b(rec(), integer()) -> rec().
+set_b(R, B) -> R#rec{b = B}.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_use.erl
new file mode 100644
index 0000000000..358e9f918c
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/rec/rec_use.erl
@@ -0,0 +1,30 @@
+-module(rec_use).
+
+-export([ok1/0, ok2/0, wrong1/0, wrong2/0, wrong3/0, wrong4/0]).
+
+ok1() ->
+ rec_adt:set_a(rec_adt:new(), foo).
+
+ok2() ->
+ R1 = rec_adt:new(),
+ B1 = rec_adt:get_b(R1),
+ R2 = rec_adt:set_b(R1, 42),
+ B2 = rec_adt:get_b(R2),
+ B1 =:= B2.
+
+wrong1() ->
+ case rec_adt:new() of
+ {rec, _, 42} -> weird1;
+ R when tuple_size(R) =:= 3 -> weird2
+ end.
+
+wrong2() ->
+ R = list_to_tuple([rec, a, 42]),
+ rec_adt:get_a(R).
+
+wrong3() ->
+ R = rec_adt:new(),
+ R =:= {rec, gazonk, 42}.
+
+wrong4() ->
+ tuple_size(rec_adt:new()).
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/timer/timer_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/timer/timer_use.erl
new file mode 100644
index 0000000000..9c8ea0af1c
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/timer/timer_use.erl
@@ -0,0 +1,20 @@
+%%---------------------------------------------------------------------------
+%% A test case with:
+%% - a genuine matching error -- 1st branch
+%% - a violation of the opaqueness of timer:tref() -- 2nd branch
+%% - a subtle violation of the opaqueness of timer:tref() -- 3rd branch
+%% The test is supposed to check that these cases are treated properly.
+%%---------------------------------------------------------------------------
+
+-module(timer_use).
+-export([wrong/0]).
+
+-spec wrong() -> error.
+
+wrong() ->
+ case timer:kill_after(42, self()) of
+ gazonk -> weird;
+ {ok, 42} -> weirder;
+ {Tag, gazonk} when Tag =/= error -> weirdest;
+ {error, _} -> error
+ end.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_adt.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_adt.erl
new file mode 100644
index 0000000000..5ca3202bba
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_adt.erl
@@ -0,0 +1,19 @@
+-module(union_adt).
+-export([new/1, new_a/1, new_rec/1]).
+
+-record(rec, {x = 42 :: integer()}).
+
+-opaque u() :: 'aaa' | 'bbb' | #rec{}.
+
+new(a) -> aaa;
+new(b) -> bbb;
+new(X) when is_integer(X) ->
+ #rec{x = X}.
+
+%% the following two functions (and their uses in union_use.erl) test
+%% that the return type is the opaque one and not just a subtype of it
+
+new_a(a) -> aaa.
+
+new_rec(X) when is_integer(X) ->
+ #rec{x = X}.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_use.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_use.erl
new file mode 100644
index 0000000000..6a103279cd
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/union/union_use.erl
@@ -0,0 +1,16 @@
+-module(union_use).
+
+-export([test/1, wrong_a/0, wrong_rec/0]).
+
+test(X) ->
+ case union_adt:new(X) of
+ A when is_atom(A) -> atom;
+ T when is_tuple(T) -> tuple
+ end.
+
+wrong_a() ->
+ aaa = union_adt:new_a(a),
+ ok.
+
+wrong_rec() ->
+ is_tuple(union_adt:new_rec(42)).
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings.hrl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings.hrl
new file mode 100644
index 0000000000..b9339a8eb1
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings.hrl
@@ -0,0 +1,205 @@
+%%
+%% wings.hrl --
+%%
+%% Global record definition and defines.
+%%
+%% Copyright (c) 2001-2005 Bjorn Gustavsson
+%%
+%% See the file "license.terms" for information on usage and redistribution
+%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
+%%
+%% $Id: wings.hrl,v 1.1 2009/01/25 18:55:33 kostis Exp $
+%%
+
+-include("wings_intl.hrl").
+
+-ifdef(NEED_ESDL).
+-include_lib("esdl/include/sdl.hrl").
+-include_lib("esdl/include/sdl_events.hrl").
+-include_lib("esdl/include/sdl_video.hrl").
+-include_lib("esdl/include/sdl_keyboard.hrl").
+-include_lib("esdl/include/sdl_mouse.hrl").
+-include_lib("esdl/src/sdl_util.hrl").
+-define(CTRL_BITS, ?KMOD_CTRL).
+-define(ALT_BITS, ?KMOD_ALT).
+-define(SHIFT_BITS, ?KMOD_SHIFT).
+-define(META_BITS, ?KMOD_META).
+-endif.
+
+-define(WINGS_VERSION, ?wings_version).
+
+-define(CHAR_HEIGHT, wings_text:height()).
+-define(CHAR_WIDTH, wings_text:width()).
+
+-define(LINE_HEIGHT, (?CHAR_HEIGHT+2)).
+-define(GROUND_GRID_SIZE, 1).
+-define(CAMERA_DIST, (8.0*?GROUND_GRID_SIZE)).
+-define(NORMAL_LINEWIDTH, 1.0).
+-define(DEGREE, 176). %Degree character.
+
+-define(HIT_BUF_SIZE, (1024*1024)).
+
+-define(PANE_COLOR, {0.52,0.52,0.52}).
+-define(BEVEL_HIGHLIGHT, {0.9,0.9,0.9}).
+-define(BEVEL_LOWLIGHT, {0.3,0.3,0.3}).
+-define(BEVEL_HIGHLIGHT_MIX, 0.5).
+-define(BEVEL_LOWLIGHT_MIX, 0.5).
+
+-define(SLOW(Cmd), begin wings_io:hourglass(), Cmd end).
+-define(TC(Cmd), wings_util:tc(fun() -> Cmd end, ?MODULE, ?LINE)).
+
+-ifdef(DEBUG).
+-define(ASSERT(E), case E of
+ true -> ok;
+ _ ->
+ erlang:error({assertion_failed,?MODULE,?LINE})
+ end).
+-define(CHECK_ERROR(), wings_gl:check_error(?MODULE, ?LINE)).
+-else.
+-define(ASSERT(E),ok).
+-define(CHECK_ERROR(), ok).
+-endif.
+
+%% Display lists per object.
+%% Important: Plain integers and integers in lists will be assumed to
+%% be display lists. Arbitrary integers must be stored inside a tuple
+%% or record to not be interpreted as a display list.
+-record(dlo,
+ {work=none, %Workmode faces.
+ smooth=none, %Smooth-shaded faces.
+ edges=none, %Edges and wire-frame.
+ vs=none, %Unselected vertices.
+ hard=none, %Hard edges.
+ sel=none, %Selected items.
+ orig_sel=none, %Original selection.
+ normals=none, %Normals.
+ pick=none, %For picking.
+ proxy_faces=none, %Smooth proxy faces.
+ proxy_edges=none, %Smooth proxy edges.
+
+ %% Miscellanous.
+ hilite=none, %Hilite display list.
+ mirror=none, %Virtual mirror data.
+ ns=none, %Normals/positions per face.
+
+ %% Source for display lists.
+ src_we=none, %Source object.
+ src_sel=none, %Source selection.
+ orig_mode=none, %Original selection mode.
+ split=none, %Split data.
+ drag=none, %For dragging.
+ transparent=false, %Object includes transparancy.
+ proxy_data=none, %Data for smooth proxy.
+ open=false, %Open (has hole).
+
+ %% List of display lists known to be needed only based
+ %% on display modes, not whether the lists themselves exist.
+ %% Example: [work,edges]
+ needed=[]
+ }).
+
+%% Main state record containing all objects and other important state.
+-record(st,
+ {shapes, %All visible shapes
+ selmode, %Selection mode:
+ % vertex, edge, face, body
+ sh=false, %Smart highlight active: true|false
+ sel=[], %Current sel: [{Id,GbSet}]
+ ssels=[], %Saved selections:
+ % [{Name,Mode,GbSet}]
+ temp_sel=none, %Selection only temporary?
+
+ mat, %Defined materials (GbTree).
+ pal=[], %Palette
+ file, %Current filename.
+ saved, %True if model has been saved.
+ onext, %Next object id to use.
+ bb=none, %Saved bounding box.
+ edge_loop=none, %Previous edge loop.
+ views={0,{}}, %{Current,TupleOfViews}
+ pst=gb_trees:empty(), %Plugin State Info
+ % gb_tree where key is plugin module
+
+ %% Previous commands.
+ repeatable, %Last repeatable command.
+ ask_args, %Ask arguments.
+ drag_args, %Drag arguments for command.
+ def, %Default operations.
+
+ %% Undo information.
+ top, %Top of stack.
+ bottom, %Bottom of stack.
+ next_is_undo, %State of undo/redo toggle.
+ undone %States that were undone.
+ }).
+
+%% The Winged-Edge data structure.
+%% See http://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/model/winged-e.html
+-record(we,
+ {id, %Shape id.
+ perm=0, %Permissions:
+ % 0 - Everything allowed.
+ % 1 - Visible, can't select.
+ % [] or {Mode,GbSet} -
+ % Invisible, can't select.
+ % The GbSet contains the
+ % object's selection.
+ name, %Name.
+ es, %gb_tree containing edges
+ fs, %gb_tree containing faces
+ he, %gb_sets containing hard edges
+ vc, %Connection info (=incident edge)
+ % for vertices.
+ vp, %Vertex positions.
+ pst=gb_trees:empty(), %Plugin State Info,
+ % gb_tree where key is plugin module
+ mat=default, %Materials.
+ next_id, %Next free ID for vertices,
+ % edges, and faces.
+ % (Needed because we never re-use
+ % IDs.)
+ mode, %'vertex'/'material'/'uv'
+ mirror=none, %Mirror: none|Face
+ light=none, %Light data: none|Light
+ has_shape=true %true|false
+ }).
+
+-define(IS_VISIBLE(Perm), (Perm =< 1)).
+-define(IS_NOT_VISIBLE(Perm), (Perm > 1)).
+-define(IS_SELECTABLE(Perm), (Perm == 0)).
+-define(IS_NOT_SELECTABLE(Perm), (Perm =/= 0)).
+
+-define(IS_LIGHT(We), ((We#we.light =/= none) and (not We#we.has_shape))).
+-define(IS_ANY_LIGHT(We), (We#we.light =/= none)).
+-define(HAS_SHAPE(We), (We#we.has_shape)).
+%-define(IS_LIGHT(We), (We#we.light =/= none)).
+%-define(IS_NOT_LIGHT(We), (We#we.light =:= none)).
+
+%% Edge in a winged-edge shape.
+-record(edge,
+ {vs, %Start vertex for edge
+ ve, %End vertex for edge
+ a=none, %Color or UV coordinate.
+ b=none, %Color or UV coordinate.
+ lf, %Left face
+ rf, %Right face
+ ltpr, %Left traversal predecessor
+ ltsu, %Left traversal successor
+ rtpr, %Right traversal predecessor
+ rtsu %Right traversal successor
+ }).
+
+%% The current view/camera.
+-record(view,
+ {origin,
+ distance, % From origo.
+ azimuth,
+ elevation,
+ pan_x, %Panning in X direction.
+ pan_y, %Panning in Y direction.
+ along_axis=none, %Which axis viewed along.
+ fov, %Field of view.
+ hither, %Near clipping plane.
+ yon %Far clipping plane.
+ }).
+
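The #we{} and #edge{} records above carry the winged-edge structure used by the wings_* modules that follow. As a small orientation sketch (an illustrative, made-up module, not part of the patch), code that includes wings.hrl typically hops from a face to one of its #edge{} records like this:

    -module(we_sketch).          %% hypothetical helper module
    -include("wings.hrl").
    -export([first_edge_rec/2]).

    %% Look up the incident edge of Face, then fetch its #edge{} record.
    first_edge_rec(Face, #we{fs=Ftab, es=Etab}) ->
        Edge = gb_trees:get(Face, Ftab),
        gb_trees:get(Edge, Etab).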
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_dissolve.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_dissolve.erl
new file mode 100644
index 0000000000..d7af9bb1d3
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_dissolve.erl
@@ -0,0 +1,375 @@
+%%
+%% wings_dissolve.erl --
+%%
+%% This module implements dissolve of faces.
+%%
+
+-module(wings_dissolve).
+
+-export([faces/2, complement/2]).
+
+-include("wings.hrl").
+
+%% faces([Face], We) -> We'
+%% Dissolve the given faces.
+faces([], We) -> We;
+faces(Faces, #we{fs=Ftab0}=We) ->
+ case gb_sets:is_empty(Faces) of
+ true -> We;
+ false when is_list(Faces) ->
+ Complement = ordsets:subtract(gb_trees:keys(Ftab0),
+ ordsets:from_list(Faces)),
+ dissolve_1(Faces, Complement, We);
+ false ->
+ Complement = ordsets:subtract(gb_trees:keys(Ftab0),
+ gb_sets:to_list(Faces)),
+ dissolve_1(Faces, Complement, We)
+ end.
+
+faces([], _, We) -> We;
+faces(Faces,Complement,We) ->
+ case gb_sets:is_empty(Faces) of
+ true -> We;
+ false -> dissolve_1(Faces, Complement,We)
+ end.
+
+dissolve_1(Faces, Complement, We0) ->
+ We1 = optimistic_dissolve(Faces,Complement,We0#we{vc=undefined}),
+ NewFaces = wings_we:new_items_as_ordset(face, We0, We1),
+ We2 = wings_face:delete_bad_faces(NewFaces, We1),
+ We = wings_we:rebuild(We2),
+ case wings_we:is_consistent(We) of
+ true ->
+ We;
+ false ->
+ io:format("Dissolving would cause an inconsistent object structure.")
+ end.
+
+%% complement([Face], We) -> We'
+%% Dissolve all faces BUT the given faces. Also invalidate the
+%% mirror face if it existed and was dissolved.
+complement(Fs0, #we{fs=Ftab0}=We0) when is_list(Fs0) ->
+ Fs = ordsets:subtract(gb_trees:keys(Ftab0), ordsets:from_list(Fs0)),
+ case faces(Fs, Fs0, We0) of
+ #we{mirror=none}=We -> We;
+ #we{mirror=Face,fs=Ftab}=We ->
+ case gb_trees:is_defined(Face, Ftab) of
+ false -> We;
+ true -> We#we{mirror=none}
+ end
+ end;
+complement(Fs, We) -> complement(gb_sets:to_list(Fs), We).
+
+optimistic_dissolve(Faces0, Compl, We0) ->
+ %% Optimistically assume that we have a simple region without
+ %% any holes.
+ case outer_edge_loop(Faces0, We0) of
+ error ->
+ %% Assumption was wrong. We need to partition the selection
+ %% and dissolve each partition in turn.
+ Parts = wings_sel:face_regions(Faces0, We0),
+ complex_dissolve(Parts, We0);
+ [_|_]=Loop ->
+ %% Assumption was correct.
+ simple_dissolve(Faces0, Compl, Loop, We0)
+ end.
+
+%% simple_dissolve(Faces, Compl, Loop, We0) -> We
+%% Dissolve a region of faces with no holes and no
+%% repeated vertices in the outer edge loop.
+
+simple_dissolve(Faces0, Compl, Loop, We0) ->
+ Faces = to_gb_set(Faces0),
+ OldFace = gb_sets:smallest(Faces),
+ Mat = wings_facemat:face(OldFace, We0),
+ We1 = fix_materials(Faces, Compl, We0),
+ #we{es=Etab0,fs=Ftab0,he=Htab0} = We1,
+ {Ftab1,Etab1,Htab} = simple_del(Faces, Ftab0, Etab0, Htab0, We1),
+ {NewFace,We2} = wings_we:new_id(We1),
+ Ftab = gb_trees:insert(NewFace, hd(Loop), Ftab1),
+ Last = lists:last(Loop),
+ Etab = update_outer([Last|Loop], Loop, NewFace, Ftab, Etab1),
+ We = We2#we{es=Etab,fs=Ftab,he=Htab},
+ wings_facemat:assign(Mat, [NewFace], We).
+
+fix_materials(Del,Keep,We) ->
+ case gb_sets:size(Del) < length(Keep) of
+ true ->
+ wings_facemat:delete_faces(Del,We);
+ false ->
+ wings_facemat:keep_faces(Keep,We)
+ end.
+
+to_gb_set(List) when is_list(List) ->
+ gb_sets:from_list(List);
+to_gb_set(S) -> S.
+
+%% Delete faces and inner edges for a simple region.
+simple_del(Faces, Ftab0, Etab0, Htab0, We) ->
+ case {gb_trees:size(Ftab0),gb_sets:size(Faces)} of
+ {AllSz,FaceSz} when AllSz < 2*FaceSz ->
+ %% At least half of the faces are selected.
+ %% It is faster to find the edges for the
+ %% unselected faces.
+ UnselFaces = ordsets:subtract(gb_trees:keys(Ftab0),
+ gb_sets:to_list(Faces)),
+
+ UnselSet = sofs:from_external(UnselFaces, [face]),
+ Ftab1 = sofs:from_external(gb_trees:to_list(Ftab0),
+ [{face,edge}]),
+ Ftab2 = sofs:restriction(Ftab1, UnselSet),
+ Ftab = gb_trees:from_orddict(sofs:to_external(Ftab2)),
+
+ Keep0 = wings_face:to_edges(UnselFaces, We),
+ Keep = sofs:set(Keep0, [edge]),
+ Etab1 = sofs:from_external(gb_trees:to_list(Etab0),
+ [{edge,info}]),
+ Etab2 = sofs:restriction(Etab1, Keep),
+ Etab = gb_trees:from_orddict(sofs:to_external(Etab2)),
+
+ Htab = simple_del_hard(Htab0, sofs:to_external(Keep), undefined),
+ {Ftab,Etab,Htab};
+ {_,_} ->
+ Ftab = lists:foldl(fun(Face, Ft) ->
+ gb_trees:delete(Face, Ft)
+ end, Ftab0, gb_sets:to_list(Faces)),
+ Inner = wings_face:inner_edges(Faces, We),
+ Etab = lists:foldl(fun(Edge, Et) ->
+ gb_trees:delete(Edge, Et)
+ end, Etab0, Inner),
+ Htab = simple_del_hard(Htab0, undefined, Inner),
+ {Ftab,Etab,Htab}
+ end.
+
+simple_del_hard(Htab, Keep, Remove) ->
+ case gb_sets:is_empty(Htab) of
+ true -> Htab;
+ false -> simple_del_hard_1(Htab, Keep, Remove)
+ end.
+
+simple_del_hard_1(Htab, Keep, undefined) ->
+ gb_sets:intersection(Htab, gb_sets:from_ordset(Keep));
+simple_del_hard_1(Htab, undefined, Remove) ->
+ gb_sets:difference(Htab, gb_sets:from_ordset(Remove)).
+
+%% complex_dissolve([Partition], We0) -> We
+%% The general dissolve.
+
+complex_dissolve([Faces|T], We0) ->
+ Face = gb_sets:smallest(Faces),
+ Mat = wings_facemat:face(Face, We0),
+ We1 = wings_facemat:delete_faces(Faces, We0),
+ Parts = outer_edge_partition(Faces, We1),
+ We = do_dissolve(Faces, Parts, Mat, We0, We1),
+ complex_dissolve(T, We);
+complex_dissolve([], We) -> We.
+
+do_dissolve(Faces, Ess, Mat, WeOrig, We0) ->
+ We1 = do_dissolve_faces(Faces, We0),
+ Inner = wings_face:inner_edges(Faces, WeOrig),
+ We2 = delete_inner(Inner, We1),
+ #we{he=Htab0} = We = do_dissolve_1(Ess, Mat, We2),
+ Htab = gb_sets:difference(Htab0, gb_sets:from_list(Inner)),
+ We#we{he=Htab}.
+
+do_dissolve_1([EdgeList|Ess], Mat, #we{es=Etab0,fs=Ftab0}=We0) ->
+ {Face,We1} = wings_we:new_id(We0),
+ Ftab = gb_trees:insert(Face, hd(EdgeList), Ftab0),
+ Last = lists:last(EdgeList),
+ Etab = update_outer([Last|EdgeList], EdgeList, Face, Ftab, Etab0),
+ We2 = We1#we{es=Etab,fs=Ftab},
+ We = wings_facemat:assign(Mat, [Face], We2),
+ do_dissolve_1(Ess, Mat, We);
+do_dissolve_1([], _Mat, We) -> We.
+
+do_dissolve_faces(Faces, #we{fs=Ftab0}=We) ->
+ Ftab = lists:foldl(fun(Face, Ft) ->
+ gb_trees:delete(Face, Ft)
+ end, Ftab0, gb_sets:to_list(Faces)),
+ We#we{fs=Ftab}.
+
+delete_inner(Inner, #we{es=Etab0}=We) ->
+ Etab = lists:foldl(fun(Edge, Et) ->
+ gb_trees:delete(Edge, Et)
+ end, Etab0, Inner),
+ We#we{es=Etab}.
+
+update_outer([Pred|[Edge|Succ]=T], More, Face, Ftab, Etab0) ->
+ #edge{rf=Rf} = R0 = gb_trees:get(Edge, Etab0),
+ Rec = case gb_trees:is_defined(Rf, Ftab) of
+ true ->
+ ?ASSERT(false == gb_trees:is_defined(R0#edge.lf, Ftab)),
+ LS = succ(Succ, More),
+ R0#edge{lf=Face,ltpr=Pred,ltsu=LS};
+ false ->
+ ?ASSERT(true == gb_trees:is_defined(R0#edge.lf, Ftab)),
+ RS = succ(Succ, More),
+ R0#edge{rf=Face,rtpr=Pred,rtsu=RS}
+ end,
+ Etab = gb_trees:update(Edge, Rec, Etab0),
+ update_outer(T, More, Face, Ftab, Etab);
+update_outer([_], _More, _Face, _Ftab, Etab) -> Etab.
+
+succ([Succ|_], _More) -> Succ;
+succ([], [Succ|_]) -> Succ.
+
+%% outer_edge_loop(FaceSet,WingedEdge) -> [Edge] | error.
+%% Partition the outer edges of the FaceSet into a single closed loop.
+%%  Return 'error' if the faces in FaceSet do not form a
+%% simple region without holes.
+%%
+%%  Equivalent to
+%% case outer_edge_partition(FaceSet,WingedEdge) of
+%% [Loop] -> Loop;
+%% [_|_] -> error
+%% end.
+%% but faster.
+
+outer_edge_loop(Faces, We) ->
+ case lists:sort(collect_outer_edges(Faces, We)) of
+ [] -> error;
+ [{Key,Val}|Es0] ->
+ case any_duplicates(Es0, Key) of
+ false ->
+ Es = gb_trees:from_orddict(Es0),
+ N = gb_trees:size(Es),
+ outer_edge_loop_1(Val, Es, Key, N, []);
+ true -> error
+ end
+ end.
+
+outer_edge_loop_1({Edge,V}, _, V, 0, Acc) ->
+ %% This edge completes the loop, and we have used all possible edges.
+ [Edge|Acc];
+outer_edge_loop_1({_,V}, _, V, _N, _) ->
+ %% Loop is complete, but we haven't used all edges.
+ error;
+outer_edge_loop_1({_,_}, _, _, 0, _) ->
+ %% We have used all possible edges, but somehow the loop
+ %% is not complete. I can't see how this is possible.
+ erlang:error(internal_error);
+outer_edge_loop_1({Edge,Vb}, Es, EndV, N, Acc0) ->
+ Acc = [Edge|Acc0],
+ outer_edge_loop_1(gb_trees:get(Vb, Es), Es, EndV, N-1, Acc).
+
+any_duplicates([{V,_}|_], V) -> true;
+any_duplicates([_], _) -> false;
+any_duplicates([{V,_}|Es], _) -> any_duplicates(Es, V).
+
+%% outer_edge_partition(FaceSet, WingedEdge) -> [[Edge]].
+%%  Partition the outer edges of the FaceSet. Each partition
+%%  of edges forms a closed loop with no repeated vertices.
+%% Outer edges are edges that have one face in FaceSet
+%% and one outside.
+%% It is assumed that FaceSet consists of one region returned by
+%% wings_sel:face_regions/2.
+
+outer_edge_partition(Faces, We) ->
+ F0 = collect_outer_edges(Faces, We),
+ F = gb_trees:from_orddict(wings_util:rel2fam(F0)),
+ partition_edges(F, []).
+
+collect_outer_edges(Faces, We) when is_list(Faces) ->
+ collect_outer_edges_1(Faces, gb_sets:from_list(Faces), We);
+collect_outer_edges(Faces, We) ->
+ collect_outer_edges_1(gb_sets:to_list(Faces), Faces, We).
+
+collect_outer_edges_1(Fs0, Faces0, #we{fs=Ftab}=We) ->
+ case {gb_trees:size(Ftab),gb_sets:size(Faces0)} of
+ {AllSz,FaceSz} when AllSz < 2*FaceSz ->
+ Fs = ordsets:subtract(gb_trees:keys(Ftab), Fs0),
+ Faces = gb_sets:from_ordset(Fs),
+ Coll = collect_outer_edges_a(Faces),
+ wings_face:fold_faces(Coll, [], Fs, We);
+ {_,_} ->
+ Coll = collect_outer_edges_b(Faces0),
+ wings_face:fold_faces(Coll, [], Fs0, We)
+ end.
+
+collect_outer_edges_a(Faces) ->
+ fun(Face, _, Edge, #edge{ve=V,vs=OtherV,lf=Face,rf=Other}, Acc) ->
+ case gb_sets:is_member(Other, Faces) of
+ false -> [{V,{Edge,OtherV}}|Acc];
+ true -> Acc
+ end;
+ (Face, _, Edge, #edge{ve=OtherV,vs=V,rf=Face,lf=Other}, Acc) ->
+ case gb_sets:is_member(Other, Faces) of
+ false -> [{V,{Edge,OtherV}}|Acc];
+ true -> Acc
+ end
+ end.
+
+collect_outer_edges_b(Faces) ->
+ fun(Face, _, Edge, #edge{vs=V,ve=OtherV,lf=Face,rf=Other}, Acc) ->
+ case gb_sets:is_member(Other, Faces) of
+ false -> [{V,{Edge,OtherV}}|Acc];
+ true -> Acc
+ end;
+ (Face, _, Edge, #edge{vs=OtherV,ve=V,rf=Face,lf=Other}, Acc) ->
+ case gb_sets:is_member(Other, Faces) of
+ false -> [{V,{Edge,OtherV}}|Acc];
+ true -> Acc
+ end
+ end.
+
+partition_edges(Es0, Acc) ->
+ case gb_trees:is_empty(Es0) of
+ true -> Acc;
+ false ->
+ {Key,Val,Es1} = gb_trees:take_smallest(Es0),
+ {Cycle,Es} = part_collect_cycle(Key, Val, Es1, []),
+ partition_edges(Es, [Cycle|Acc])
+ end.
+
+%% part_collect_cycle(Vertex, VertexInfo, EdgeInfo, Acc0) ->
+%% none | {[Edge],EdgeInfo}
+%% Collect the cycle starting with Vertex.
+%%
+%% Note: This function can only return 'none' when called
+%% recursively.
+
+part_collect_cycle(_, repeated, _, _) ->
+ %% Repeated vertex - we are not allowed to go this way.
+ %% Can only happen if we were called recursively because
+ %% a fork was encountered.
+ none;
+part_collect_cycle(_Va, [{Edge,Vb}], Es0, Acc0) ->
+ %% Basic case. Only one way to go.
+ Acc = [Edge|Acc0],
+ case gb_trees:lookup(Vb, Es0) of
+ none ->
+ {Acc,Es0};
+ {value,Val} ->
+ Es = gb_trees:delete(Vb, Es0),
+ part_collect_cycle(Vb, Val, Es, Acc)
+ end;
+part_collect_cycle(Va, [Val|More], Es0, []) ->
+ %% No cycle started yet and we have multiple choice of
+ %% edges out from this vertex. It doesn't matter which
+ %% edge we follow, so we'll follow the first one.
+ {Cycle,Es} = part_collect_cycle(Va, [Val], Es0, []),
+ {Cycle,gb_trees:insert(Va, More, Es)};
+part_collect_cycle(Va, Edges, Es0, Acc) ->
+ %% We have a partially collected cycle and we have a
+ %% fork (multiple choice of edges). Here we must choose
+ %% an edge that closes the cycle without passing Va
+ %% again (because repeated vertices are not allowed).
+ Es = gb_trees:insert(Va, repeated, Es0),
+ part_fork(Va, Edges, Es, Acc, []).
+
+part_fork(Va, [Val|More], Es0, Acc, Tried) ->
+ %% Try to complete the cycle by following this edge.
+ case part_collect_cycle(Va, [Val], Es0, Acc) of
+ none ->
+ %% Failure - try the next edge.
+ part_fork(Va, More, Es0, Acc, [Val|Tried]);
+ {Cycle,Es} ->
+ %% Found a cycle. Update the vertex information
+ %% with all edges remaining.
+ {Cycle,gb_trees:update(Va, lists:reverse(Tried, More), Es)}
+ end;
+part_fork(_, [], _, _, _) ->
+ %% None of edges were possible. Can only happen if this function
+ %% was called recursively (i.e. if we hit another fork while
+ %% processing a fork).
+ none.
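Taken together, the exported entry points above offer two complementary operations. A usage sketch (illustrative only, not part of the patch; dissolve_demo is a made-up name), assuming We0 is a consistent #we{} and Faces is a list of face ids present in We0#we.fs:

    dissolve_demo(Faces, We0) ->
        Merged  = wings_dissolve:faces(Faces, We0),      %% dissolve the region itself
        Trimmed = wings_dissolve:complement(Faces, We0), %% dissolve everything else
        {Merged, Trimmed}.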
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge.erl
new file mode 100644
index 0000000000..3483acb711
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge.erl
@@ -0,0 +1,243 @@
+%%
+%% wings_edge.erl --
+%%
+%% This module contains most edge commands and edge utility functions.
+%%
+%% Copyright (c) 2001-2008 Bjorn Gustavsson.
+%%
+%% See the file "license.terms" for information on usage and redistribution
+%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
+%%
+%% $Id: wings_edge.erl,v 1.1 2009/01/25 18:55:33 kostis Exp $
+%%
+
+-module(wings_edge).
+
+-export([dissolve_edges/2]).
+
+-include("wings.hrl").
+
+%%%
+%%% Dissolve.
+%%%
+
+dissolve_edges(Edges0, We0) when is_list(Edges0) ->
+ #we{es=Etab} = We1 = lists:foldl(fun internal_dissolve_edge/2, We0, Edges0),
+ case [E || E <- Edges0, gb_trees:is_defined(E, Etab)] of
+ Edges0 ->
+ %% No edge was deleted in the last pass. We are done.
+ We = wings_we:rebuild(We0#we{vc=undefined}),
+ wings_we:validate_mirror(We);
+ Edges ->
+ dissolve_edges(Edges, We1)
+ end;
+dissolve_edges(Edges, We) ->
+ dissolve_edges(gb_sets:to_list(Edges), We).
+
+internal_dissolve_edge(Edge, #we{es=Etab}=We0) ->
+ case gb_trees:lookup(Edge, Etab) of
+ none -> We0;
+ {value,#edge{ltpr=Same,ltsu=Same,rtpr=Same,rtsu=Same}} ->
+ Empty = gb_trees:empty(),
+ We0#we{vc=Empty,vp=Empty,es=Empty,fs=Empty,he=gb_sets:empty()};
+ {value,#edge{rtpr=Back,ltsu=Back}=Rec} ->
+ merge_edges(backward, Edge, Rec, We0);
+ {value,#edge{rtsu=Forward,ltpr=Forward}=Rec} ->
+ merge_edges(forward, Edge, Rec, We0);
+ {value,Rec} ->
+ try dissolve_edge_1(Edge, Rec, We0) of
+ We -> We
+ catch
+ throw:hole -> We0
+ end
+ end.
+
+%% dissolve_edge_1(Edge, EdgeRecord, We) -> We
+%%  Remove an edge and a face. If one of the faces is degenerate
+%%  (consists of only two edges), remove that one. Otherwise, it
+%% doesn't matter which face we remove.
+dissolve_edge_1(Edge, #edge{lf=Remove,rf=Keep,ltpr=Same,ltsu=Same}=Rec, We) ->
+ dissolve_edge_2(Edge, Remove, Keep, Rec, We);
+dissolve_edge_1(Edge, #edge{lf=Keep,rf=Remove}=Rec, We) ->
+ dissolve_edge_2(Edge, Remove, Keep, Rec, We).
+
+dissolve_edge_2(Edge, FaceRemove, FaceKeep,
+ #edge{ltpr=LP,ltsu=LS,rtpr=RP,rtsu=RS},
+ #we{fs=Ftab0,es=Etab0,he=Htab0}=We0) ->
+ %% First change face for all edges surrounding the face we will remove.
+ Etab1 = wings_face:fold(
+ fun (_, E, _, IntEtab) when E =:= Edge -> IntEtab;
+ (_, E, R, IntEtab) ->
+ case R of
+ #edge{lf=FaceRemove,rf=FaceKeep} ->
+ throw(hole);
+ #edge{rf=FaceRemove,lf=FaceKeep} ->
+ throw(hole);
+ #edge{lf=FaceRemove} ->
+ gb_trees:update(E, R#edge{lf=FaceKeep}, IntEtab);
+ #edge{rf=FaceRemove} ->
+ gb_trees:update(E, R#edge{rf=FaceKeep}, IntEtab)
+ end
+ end, Etab0, FaceRemove, We0),
+
+ %% Patch all predecessors and successor of the edge we will remove.
+ Etab2 = patch_edge(LP, RS, Edge, Etab1),
+ Etab3 = patch_edge(LS, RP, Edge, Etab2),
+ Etab4 = patch_edge(RP, LS, Edge, Etab3),
+ Etab5 = patch_edge(RS, LP, Edge, Etab4),
+
+ %% Remove the edge.
+ Etab = gb_trees:delete(Edge, Etab5),
+ Htab = hardness(Edge, soft, Htab0),
+
+ %% Remove the face. Patch the face entry for the remaining face.
+ Ftab1 = gb_trees:delete(FaceRemove, Ftab0),
+ We1 = wings_facemat:delete_face(FaceRemove, We0),
+ Ftab = gb_trees:update(FaceKeep, LP, Ftab1),
+
+ %% Return result.
+ We = We1#we{es=Etab,fs=Ftab,vc=undefined,he=Htab},
+ AnEdge = gb_trees:get(FaceKeep, Ftab),
+ case gb_trees:get(AnEdge, Etab) of
+ #edge{lf=FaceKeep,ltpr=Same,ltsu=Same} ->
+ internal_dissolve_edge(AnEdge, We);
+ #edge{rf=FaceKeep,rtpr=Same,rtsu=Same} ->
+ internal_dissolve_edge(AnEdge, We);
+ _Other ->
+ case wings_we:is_face_consistent(FaceKeep, We) of
+ true ->
+ We;
+ false ->
+ io:format("Dissolving would cause a badly formed face.")
+ end
+ end.
+
+%%
+%% We like winged edges, but not winged vertices (a vertex with
+%% only two edges connected to it). We will remove the winged vertex
+%% by joining the two edges connected to it.
+%%
+
+merge_edges(Dir, Edge, Rec, #we{es=Etab}=We) ->
+ {Va,Vb,_,_,_,_,To,To} = half_edge(Dir, Rec),
+ case gb_trees:get(To, Etab) of
+ #edge{vs=Va,ve=Vb} ->
+ del_2edge_face(Dir, Edge, Rec, To, We);
+ #edge{vs=Vb,ve=Va} ->
+ del_2edge_face(Dir, Edge, Rec, To, We);
+ _Other ->
+ merge_1(Dir, Edge, Rec, To, We)
+ end.
+
+merge_1(Dir, Edge, Rec, To, #we{es=Etab0,fs=Ftab0,he=Htab0}=We) ->
+ OtherDir = reverse_dir(Dir),
+ {Vkeep,Vdelete,Lf,Rf,A,B,L,R} = half_edge(OtherDir, Rec),
+ Etab1 = patch_edge(L, To, Edge, Etab0),
+ Etab2 = patch_edge(R, To, Edge, Etab1),
+ Etab3 = patch_half_edge(To, Vkeep, Lf, A, L, Rf, B, R, Vdelete, Etab2),
+ Htab = hardness(Edge, soft, Htab0),
+ Etab = gb_trees:delete(Edge, Etab3),
+ #edge{lf=Lf,rf=Rf} = Rec,
+ Ftab1 = update_face(Lf, To, Edge, Ftab0),
+ Ftab = update_face(Rf, To, Edge, Ftab1),
+ merge_2(To, We#we{es=Etab,fs=Ftab,he=Htab,vc=undefined}).
+
+merge_2(Edge, #we{es=Etab}=We) ->
+ %% If the merged edge is part of a two-edge face, we must
+ %% remove that edge too.
+ case gb_trees:get(Edge, Etab) of
+ #edge{ltpr=Same,ltsu=Same} ->
+ internal_dissolve_edge(Edge, We);
+ #edge{rtpr=Same,rtsu=Same} ->
+ internal_dissolve_edge(Edge, We);
+ _Other -> We
+ end.
+
+update_face(Face, Edge, OldEdge, Ftab) ->
+ case gb_trees:get(Face, Ftab) of
+ OldEdge -> gb_trees:update(Face, Edge, Ftab);
+ _Other -> Ftab
+ end.
+
+del_2edge_face(Dir, EdgeA, RecA, EdgeB,
+ #we{es=Etab0,fs=Ftab0,he=Htab0}=We) ->
+ {_,_,Lf,Rf,_,_,_,_} = half_edge(reverse_dir(Dir), RecA),
+ RecB = gb_trees:get(EdgeB, Etab0),
+ Del = gb_sets:from_list([EdgeA,EdgeB]),
+ EdgeANear = stabile_neighbor(RecA, Del),
+ EdgeBNear = stabile_neighbor(RecB, Del),
+ Etab1 = patch_edge(EdgeANear, EdgeBNear, EdgeA, Etab0),
+ Etab2 = patch_edge(EdgeBNear, EdgeANear, EdgeB, Etab1),
+ Etab3 = gb_trees:delete(EdgeA, Etab2),
+ Etab = gb_trees:delete(EdgeB, Etab3),
+
+ %% Patch hardness table.
+ Htab1 = hardness(EdgeA, soft, Htab0),
+ Htab = hardness(EdgeB, soft, Htab1),
+
+ %% Patch the face table.
+ #edge{lf=Klf,rf=Krf} = gb_trees:get(EdgeANear, Etab),
+ KeepFaces = ordsets:from_list([Klf,Krf]),
+ EdgeAFaces = ordsets:from_list([Lf,Rf]),
+ [DelFace] = ordsets:subtract(EdgeAFaces, KeepFaces),
+ Ftab1 = gb_trees:delete(DelFace, Ftab0),
+ [KeepFace] = ordsets:intersection(KeepFaces, EdgeAFaces),
+ Ftab2 = update_face(KeepFace, EdgeANear, EdgeA, Ftab1),
+ Ftab = update_face(KeepFace, EdgeBNear, EdgeB, Ftab2),
+
+ %% Return result.
+ We#we{vc=undefined,es=Etab,fs=Ftab,he=Htab}.
+
+stabile_neighbor(#edge{ltpr=Ea,ltsu=Eb,rtpr=Ec,rtsu=Ed}, Del) ->
+ [Edge] = lists:foldl(fun(E, A) ->
+ case gb_sets:is_member(E, Del) of
+ true -> A;
+ false -> [E|A]
+ end
+ end, [], [Ea,Eb,Ec,Ed]),
+ Edge.
+
+%%%
+%%% Setting hard/soft edges.
+%%%
+
+hardness(Edge, soft, Htab) -> gb_sets:delete_any(Edge, Htab);
+hardness(Edge, hard, Htab) -> gb_sets:add(Edge, Htab).
+
+%%%
+%%% Utilities.
+%%%
+
+reverse_dir(forward) -> backward;
+reverse_dir(backward) -> forward.
+
+half_edge(backward, #edge{vs=Va,ve=Vb,lf=Lf,rf=Rf,a=A,b=B,ltsu=L,rtpr=R}) ->
+ {Va,Vb,Lf,Rf,A,B,L,R};
+half_edge(forward, #edge{ve=Va,vs=Vb,lf=Lf,rf=Rf,a=A,b=B,ltpr=L,rtsu=R}) ->
+ {Va,Vb,Lf,Rf,A,B,L,R}.
+
+patch_half_edge(Edge, V, FaceA, A, Ea, FaceB, B, Eb, OrigV, Etab) ->
+ New = case gb_trees:get(Edge, Etab) of
+ #edge{vs=OrigV,lf=FaceA,rf=FaceB}=Rec ->
+ Rec#edge{a=A,vs=V,ltsu=Ea,rtpr=Eb};
+ #edge{vs=OrigV,lf=FaceB,rf=FaceA}=Rec ->
+ Rec#edge{a=B,vs=V,ltsu=Eb,rtpr=Ea};
+ #edge{ve=OrigV,lf=FaceA,rf=FaceB}=Rec ->
+ Rec#edge{b=B,ve=V,ltpr=Ea,rtsu=Eb};
+ #edge{ve=OrigV,lf=FaceB,rf=FaceA}=Rec ->
+ Rec#edge{b=A,ve=V,ltpr=Eb,rtsu=Ea}
+ end,
+ gb_trees:update(Edge, New, Etab).
+
+patch_edge(Edge, ToEdge, OrigEdge, Etab) ->
+ New = case gb_trees:get(Edge, Etab) of
+ #edge{ltsu=OrigEdge}=R ->
+ R#edge{ltsu=ToEdge};
+ #edge{ltpr=OrigEdge}=R ->
+ R#edge{ltpr=ToEdge};
+ #edge{rtsu=OrigEdge}=R ->
+ R#edge{rtsu=ToEdge};
+ #edge{rtpr=OrigEdge}=R ->
+ R#edge{rtpr=ToEdge}
+ end,
+ gb_trees:update(Edge, New, Etab).
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge_cmd.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge_cmd.erl
new file mode 100644
index 0000000000..e478ec245b
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_edge_cmd.erl
@@ -0,0 +1,91 @@
+%%
+%% wings_edge_cmd.erl --
+%%
+%% This module contains the Loop Cut edge command.
+%%
+
+-module(wings_edge_cmd).
+
+-export([loop_cut/1]).
+
+-include("wings.hrl").
+
+%%%
+%%% The Loop Cut command.
+%%%
+
+loop_cut(St0) ->
+ {Sel,St} = wings_sel:fold(fun loop_cut/3, {[],St0}, St0),
+ wings_sel:set(body, Sel, St).
+
+loop_cut(Edges, #we{name=Name,id=Id,fs=Ftab}=We0, {Sel,St0}) ->
+ AdjFaces = wings_face:from_edges(Edges, We0),
+ case loop_cut_partition(AdjFaces, Edges, We0, []) of
+ [_] ->
+ io:format("Edge loop doesn't divide ~p into two parts.", [Name]);
+ Parts0 ->
+ %% We arbitrarily decide that the largest part of the object
+ %% will be left unselected and will keep the name of the object.
+
+ Parts1 = [{gb_trees:size(P),P} || P <- Parts0],
+ Parts2 = lists:reverse(lists:sort(Parts1)),
+ [_|Parts] = [gb_sets:to_list(P) || {_,P} <- Parts2],
+
+ %% Also, this first part will also contain any sub-object
+ %% that was not reachable from any of the edges. Therefore,
+ %% we calculate the first part as the complement of the union
+ %% of all other parts.
+
+ FirstComplement = ordsets:union(Parts),
+ First = ordsets:subtract(gb_trees:keys(Ftab), FirstComplement),
+
+ We = wings_dissolve:complement(First, We0),
+ Shs = St0#st.shapes,
+ St = St0#st{shapes=gb_trees:update(Id, We, Shs)},
+ loop_cut_make_copies(Parts, We0, Sel, St)
+ end.
+
+loop_cut_make_copies([P|Parts], We0, Sel0, #st{onext=Id}=St0) ->
+ Sel = [{Id,gb_sets:singleton(0)}|Sel0],
+ We = wings_dissolve:complement(P, We0),
+ St = wings_shape:insert(We, cut, St0),
+ loop_cut_make_copies(Parts, We0, Sel, St);
+loop_cut_make_copies([], _, Sel, St) -> {Sel,St}.
+
+loop_cut_partition(Faces0, Edges, We, Acc) ->
+ case gb_sets:is_empty(Faces0) of
+ true -> Acc;
+ false ->
+ {AFace,Faces1} = gb_sets:take_smallest(Faces0),
+ Reachable = collect_faces(AFace, Edges, We),
+ Faces = gb_sets:difference(Faces1, Reachable),
+ loop_cut_partition(Faces, Edges, We, [Reachable|Acc])
+ end.
+
+collect_faces(Face, Edges, We) ->
+ collect_faces(gb_sets:singleton(Face), We, Edges, gb_sets:empty()).
+
+collect_faces(Work0, We, Edges, Acc0) ->
+ case gb_sets:is_empty(Work0) of
+ true -> Acc0;
+ false ->
+ {Face,Work1} = gb_sets:take_smallest(Work0),
+ Acc = gb_sets:insert(Face, Acc0),
+ Work = collect_maybe_add(Work1, Face, Edges, We, Acc),
+ collect_faces(Work, We, Edges, Acc)
+ end.
+
+collect_maybe_add(Work, Face, Edges, We, Res) ->
+ wings_face:fold(
+ fun(_, Edge, Rec, A) ->
+ case gb_sets:is_member(Edge, Edges) of
+ true -> A;
+ false ->
+ Of = wings_face:other(Face, Rec),
+ case gb_sets:is_member(Of, Res) of
+ true -> A;
+ false -> gb_sets:add(Of, A)
+ end
+ end
+ end, Work, Face, We).
+
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_face.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_face.erl
new file mode 100644
index 0000000000..487c05aa58
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_face.erl
@@ -0,0 +1,127 @@
+%%
+%% wings_face.erl --
+%%
+%% This module contains help routines for faces, such as fold functions
+%% and face iterators.
+%%
+
+-module(wings_face).
+
+-export([delete_bad_faces/2, fold/4, fold_faces/4, from_edges/2,
+ inner_edges/2, to_edges/2, other/2]).
+
+-include("wings.hrl").
+
+from_edges(Es, #we{es=Etab}) when is_list(Es) ->
+ from_edges_1(Es, Etab, []);
+from_edges(Es, We) ->
+ from_edges(gb_sets:to_list(Es), We).
+
+from_edges_1([E|Es], Etab, Acc) ->
+ #edge{lf=Lf,rf=Rf} = gb_trees:get(E, Etab),
+ from_edges_1(Es, Etab, [Lf,Rf|Acc]);
+from_edges_1([], _, Acc) -> gb_sets:from_list(Acc).
+
+%% other(Face, EdgeRecord) -> OtherFace
+%% Pick up the "other face" from an edge record.
+other(Face, #edge{lf=Face,rf=Other}) -> Other;
+other(Face, #edge{rf=Face,lf=Other}) -> Other.
+
+%% to_edges(Faces, We) -> [Edge]
+%% Convert a set or list of faces to a list of edges.
+to_edges(Fs, We) ->
+ ordsets:from_list(to_edges_raw(Fs, We)).
+
+%% inner_edges(Faces, We) -> [Edge]
+%% Given a set of faces, return all inner edges.
+inner_edges(Faces, We) ->
+ S = to_edges_raw(Faces, We),
+ inner_edges_1(lists:sort(S), []).
+
+inner_edges_1([E,E|T], In) ->
+ inner_edges_1(T, [E|In]);
+inner_edges_1([_|T], In) ->
+ inner_edges_1(T, In);
+inner_edges_1([], In) -> lists:reverse(In).
+
+%% Fold over all edges surrounding a face.
+
+fold(F, Acc, Face, #we{es=Etab,fs=Ftab}) ->
+ Edge = gb_trees:get(Face, Ftab),
+ fold(Edge, Etab, F, Acc, Face, Edge, not_done).
+
+fold(LastEdge, _, _, Acc, _, LastEdge, done) -> Acc;
+fold(Edge, Etab, F, Acc0, Face, LastEdge, _) ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{ve=V,lf=Face,ltsu=NextEdge}=E ->
+ Acc = F(V, Edge, E, Acc0),
+ fold(NextEdge, Etab, F, Acc, Face, LastEdge, done);
+ #edge{vs=V,rf=Face,rtsu=NextEdge}=E ->
+ Acc = F(V, Edge, E, Acc0),
+ fold(NextEdge, Etab, F, Acc, Face, LastEdge, done)
+ end.
+
+%% Fold over a set of faces.
+
+fold_faces(F, Acc0, [Face|Faces], #we{es=Etab,fs=Ftab}=We) ->
+ Edge = gb_trees:get(Face, Ftab),
+ Acc = fold_faces_1(Edge, Etab, F, Acc0, Face, Edge, not_done),
+ fold_faces(F, Acc, Faces, We);
+fold_faces(_F, Acc, [], _We) -> Acc;
+fold_faces(F, Acc, Faces, We) ->
+ fold_faces(F, Acc, gb_sets:to_list(Faces), We).
+
+fold_faces_1(LastEdge, _, _, Acc, _, LastEdge, done) -> Acc;
+fold_faces_1(Edge, Etab, F, Acc0, Face, LastEdge, _) ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{ve=V,lf=Face,ltsu=NextEdge}=E ->
+ Acc = F(Face, V, Edge, E, Acc0),
+ fold_faces_1(NextEdge, Etab, F, Acc, Face, LastEdge, done);
+ #edge{vs=V,rf=Face,rtsu=NextEdge}=E ->
+ Acc = F(Face, V, Edge, E, Acc0),
+ fold_faces_1(NextEdge, Etab, F, Acc, Face, LastEdge, done)
+ end.
+
+%% Return an unsorted list of edges for the faces (with duplicates).
+
+to_edges_raw(Faces, #we{es=Etab,fs=Ftab}) when is_list(Faces) ->
+ to_edges_raw(Faces, Ftab, Etab, []);
+to_edges_raw(Faces, We) ->
+ to_edges_raw(gb_sets:to_list(Faces), We).
+
+to_edges_raw([Face|Faces], Ftab, Etab, Acc0) ->
+ Edge = gb_trees:get(Face, Ftab),
+ Acc = to_edges_raw_1(Edge, Etab, Acc0, Face, Edge, not_done),
+ to_edges_raw(Faces, Ftab, Etab, Acc);
+to_edges_raw([], _, _, Acc) -> Acc.
+
+to_edges_raw_1(LastEdge, _, Acc, _, LastEdge, done) -> Acc;
+to_edges_raw_1(Edge, Etab, Acc, Face, LastEdge, _) ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{lf=Face,ltsu=NextEdge} ->
+ to_edges_raw_1(NextEdge, Etab, [Edge|Acc], Face, LastEdge, done);
+ #edge{rf=Face,rtsu=NextEdge} ->
+ to_edges_raw_1(NextEdge, Etab, [Edge|Acc], Face, LastEdge, done)
+ end.
+
+delete_bad_faces(Fs, #we{fs=Ftab,es=Etab}=We) when is_list(Fs) ->
+ Es = bad_edges(Fs, Ftab, Etab, []),
+ wings_edge:dissolve_edges(Es, We);
+delete_bad_faces(Fs, We) ->
+ delete_bad_faces(gb_sets:to_list(Fs), We).
+
+bad_edges([F|Fs], Ftab, Etab, Acc) ->
+ case gb_trees:lookup(F, Ftab) of
+ {value,Edge} ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{ltpr=Same,ltsu=Same,rtpr=Same,rtsu=Same} ->
+ erlang:error({internal_error,one_edged_face,F});
+ #edge{ltpr=Same,ltsu=Same} ->
+ bad_edges(Fs, Ftab, Etab, [Edge|Acc]);
+ #edge{rtpr=Same,rtsu=Same} ->
+ bad_edges(Fs, Ftab, Etab, [Edge|Acc]);
+ _ -> bad_edges(Fs, Ftab, Etab, Acc)
+ end;
+ none -> bad_edges(Fs, Ftab, Etab, Acc)
+ end;
+bad_edges([], _, _, Acc) -> Acc.
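The fold/4 helper above calls F(Vertex, Edge, EdgeRecord, Acc) once for every edge around a face. A minimal sketch (illustrative only, not part of the patch; edge_count is a made-up function for a scratch module) that counts a face's edges with it:

    edge_count(Face, We) ->
        wings_face:fold(fun(_V, _Edge, _Rec, N) -> N + 1 end, 0, Face, We).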
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_facemat.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_facemat.erl
new file mode 100644
index 0000000000..6e018e49b5
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_facemat.erl
@@ -0,0 +1,299 @@
+%%
+%% wings_facemat.erl --
+%%
+%% This module keeps track of the mapping from a face number
+%% to its material name.
+%%
+%% Copyright (c) 2001-2005 Bjorn Gustavsson
+%%
+%% See the file "license.terms" for information on usage and redistribution
+%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
+%%
+%% $Id: wings_facemat.erl,v 1.1 2009/01/25 18:55:33 kostis Exp $
+%%
+%%
+%%
+
+-module(wings_facemat).
+-export([all/1,face/2,used_materials/1,mat_faces/2,
+ assign/2,assign/3,
+ delete_face/2,delete_faces/2,keep_faces/2,
+ hide_faces/1,show_faces/1,
+ renumber/2,gc/1,merge/1]).
+
+-include("wings.hrl").
+-import(lists, [keysearch/3,reverse/1,reverse/2,sort/1]).
+
+%%%
+%%% API functions for retrieving information.
+%%%
+
+%% all(We) -> [{Face,MaterialName}]
+%% Return materials for all faces as an ordered list.
+all(#we{mat=M}=We) when is_atom(M) ->
+ Vis = visible_faces(We),
+ make_tab(Vis, M);
+all(#we{mat=L}) when is_list(L) ->
+ remove_invisible(L).
+
+%% face(Face, We) -> MaterialName
+%% Return the material for the face Face.
+face(_, #we{mat=M}) when is_atom(M) -> M;
+face(Face, #we{mat=Tab}) ->
+ {value,{_,Mat}} = keysearch(Face, 1, Tab),
+ Mat.
+
+%% used_materials(We) -> [MaterialName]
+%% Return an ordered list of all materials used in the We.
+used_materials(#we{mat=M}) when is_atom(M) -> [M];
+used_materials(#we{mat=L}) when is_list(L) ->
+ used_materials_1(L, []).
+
+%% mat_faces([{Face,Info}], We) -> [{Mat,[{Face,Info}]}]
+%% Group face tab into groups based on material.
+%% Used for displaying objects.
+mat_faces(Ftab, #we{mat=AtomMat}) when is_atom(AtomMat) ->
+ [{AtomMat,Ftab}];
+mat_faces(Ftab, #we{mat=MatTab}) ->
+ mat_faces_1(Ftab, remove_invisible(MatTab), []).
+
+%%%
+%%% API functions for updating material name mapping.
+%%%
+
+%% assign([{Face,MaterialName}], We) -> We'
+%% Assign materials.
+assign([], We) -> We;
+assign([{F,M}|_]=FaceMs, We) when is_atom(M), is_integer(F) ->
+ Tab = ordsets:from_list(FaceMs),
+ assign_face_ms(Tab, We).
+
+%% assign(MaterialName, Faces, We) -> We'
+%% Assign MaterialName to all faces Faces.
+assign(Mat, _, #we{mat=Mat}=We) when is_atom(Mat) -> We;
+assign(Mat, Fs, We) when is_atom(Mat), is_list(Fs) ->
+ assign_1(Mat, Fs, We);
+assign(Mat, Fs, We) when is_atom(Mat) ->
+ assign_1(Mat, gb_sets:to_list(Fs), We).
+
+%% delete_face(Face, We) -> We'
+%% Delete the material name mapping for the face Face.
+delete_face(_, #we{mat=AtomMat}=We) when is_atom(AtomMat) -> We;
+delete_face(Face, #we{mat=MatTab0}=We) ->
+ MatTab = orddict:erase(Face, MatTab0),
+ We#we{mat=MatTab}.
+
+%% delete_faces(Faces, We) -> We'
+%% Delete the material name mapping for all faces Faces.
+delete_faces(_, #we{mat=AtomMat}=We) when is_atom(AtomMat) -> We;
+delete_faces(Faces0, #we{mat=MatTab0}=We) when is_list(Faces0) ->
+ Faces = sofs:from_external(Faces0, [face]),
+ MatTab1 = sofs:from_external(MatTab0, [{face,mat}]),
+ MatTab2 = sofs:drestriction(MatTab1, Faces),
+ MatTab = sofs:to_external(MatTab2),
+ We#we{mat=MatTab};
+delete_faces(Faces, We) ->
+ delete_faces(gb_sets:to_list(Faces), We).
+
+%% keep_faces(Faces, We) -> We'
+%%  Delete the material name mappings for all faces other than Faces.
+keep_faces(_, #we{mat=AtomMat}=We) when is_atom(AtomMat) -> We;
+keep_faces([Face], We) ->
+ Mat = face(Face,We),
+ We#we{mat=[{Face,Mat}]};
+keep_faces(Faces0, #we{mat=MatTab0}=We) when is_list(Faces0) ->
+ Faces = sofs:from_external(Faces0, [face]),
+ MatTab1 = sofs:from_external(MatTab0, [{face,mat}]),
+ MatTab2 = sofs:restriction(MatTab1, Faces),
+ MatTab = sofs:to_external(MatTab2),
+ We#we{mat=MatTab};
+keep_faces(Faces, We) ->
+ keep_faces(gb_sets:to_list(Faces), We).
+
+%% hide_faces(We) -> We'
+%% Update the material name mapping in the We to reflect
+%% the newly hidden faces in the face tab.
+hide_faces(#we{mat=M}=We) when is_atom(M) -> We;
+hide_faces(#we{mat=L0,fs=Ftab}=We) ->
+ L = hide_faces_1(L0, Ftab, []),
+ We#we{mat=L}.
+
+%% show_faces(We) -> We'
+%% Update the material name mapping in the We to reflect
+%% that all faces are again visible.
+show_faces(#we{mat=M}=We) when is_atom(M) -> We;
+show_faces(#we{mat=L0}=We) ->
+ L = show_faces_1(L0, []),
+ We#we{mat=L}.
+
+%% renumber(MaterialMapping, FaceOldToNew) -> MaterialMapping.
+%%  Renumber face numbers in the material name mapping.
+renumber(Mat, _) when is_atom(Mat) -> Mat;
+renumber(L, Fmap) when is_list(L) -> renumber_1(L, Fmap, []).
+
+%% gc(We) -> We'
+%% Garbage collect the material mapping information, removing
+%% the mapping for any face no longer present in the face table.
+gc(#we{mat=Mat}=We) when is_atom(Mat) -> We;
+gc(#we{mat=Tab0,fs=Ftab}=We) ->
+ Fs = sofs:from_external(gb_trees:keys(Ftab), [face]),
+ Tab1 = sofs:from_external(Tab0, [{face,material}]),
+ Tab2 = sofs:restriction(Tab1, Fs),
+ Tab = sofs:to_external(Tab2),
+ We#we{mat=compress(Tab)}.
+
+%% merge([We]) -> [{Face,MaterialName}] | MaterialName.
+%% Merge materials for several objects.
+merge([#we{mat=M}|Wes]=L) when is_atom(M) ->
+ case merge_all_same(Wes, M) of
+ true -> M;
+ false -> merge_1(L, [])
+ end;
+merge(L) -> merge_1(L, []).
+
+merge_1([#we{mat=M,es=Etab}|T], Acc) when is_atom(M) ->
+ FsM = merge_2(gb_trees:values(Etab), M, []),
+ merge_1(T, [FsM|Acc]);
+merge_1([#we{mat=FsMs}|T], Acc) ->
+ merge_1(T, [FsMs|Acc]);
+merge_1([], Acc) -> lists:merge(Acc).
+
+merge_2([#edge{lf=Lf,rf=Rf}|T], M, Acc) ->
+ merge_2(T, M, [{Lf,M},{Rf,M}|Acc]);
+merge_2([], _, Acc) -> ordsets:from_list(Acc).
+
+merge_all_same([#we{mat=M}|Wes], M) -> merge_all_same(Wes, M);
+merge_all_same([_|_], _) -> false;
+merge_all_same([], _) -> true.
+
+%%%
+%%% Local functions.
+%%%
+
+assign_1(Mat, Fs, #we{fs=Ftab}=We) ->
+ case length(Fs) =:= gb_trees:size(Ftab) of
+ true -> We#we{mat=Mat};
+ false -> assign_2(Mat, Fs, We)
+ end.
+
+assign_2(Mat, Fs0, #we{fs=Ftab,mat=Mat0}=We) when is_atom(Mat0) ->
+ Fs = ordsets:from_list(Fs0),
+ OtherFaces = ordsets:subtract(gb_trees:keys(Ftab), Fs),
+ Tab0 = make_tab(OtherFaces, Mat0),
+ Tab1 = make_tab(Fs, Mat),
+ Tab = lists:merge(Tab0, Tab1),
+ We#we{mat=Tab};
+assign_2(Mat, Fs0, #we{mat=Tab0}=We) when is_list(Tab0) ->
+ Fs = ordsets:from_list(Fs0),
+ Tab1 = make_tab(Fs, Mat),
+ Tab = mat_merge(Tab1, Tab0, []),
+ We#we{mat=Tab}.
+
+assign_face_ms(Tab, #we{fs=Ftab}=We) ->
+ case length(Tab) =:= gb_trees:size(Ftab) of
+ true -> We#we{mat=compress(Tab)};
+ false -> assign_face_ms_1(Tab, We)
+ end.
+
+assign_face_ms_1(Tab1, #we{fs=Ftab,mat=Mat0}=We) when is_atom(Mat0) ->
+ Tab0 = make_tab(gb_trees:keys(Ftab), Mat0),
+ Tab = mat_merge(Tab1, Tab0, []),
+ We#we{mat=Tab};
+assign_face_ms_1(Tab1, #we{mat=Tab0}=We) when is_list(Tab0) ->
+ Tab = mat_merge(Tab1, Tab0, []),
+ We#we{mat=Tab}.
+
+mat_merge([{Fn,_}|_]=Fns, [{Fo,_}=Fold|Fos], Acc) when Fo < Fn ->
+ mat_merge(Fns, Fos, [Fold|Acc]);
+mat_merge([{Fn,_}=Fnew|Fns], [{Fo,_}|_]=Fos, Acc) when Fo > Fn ->
+ mat_merge(Fns, Fos, [Fnew|Acc]);
+mat_merge([Fnew|Fns], [_|Fos], Acc) -> % Equality
+ mat_merge(Fns, Fos, [Fnew|Acc]);
+mat_merge([], Fos, Acc) ->
+ rev_compress(Acc, Fos);
+mat_merge(Fns, [], Acc) ->
+ rev_compress(Acc, Fns).
+
+make_tab(Fs, M) ->
+ make_tab_1(Fs, M, []).
+
+make_tab_1([F|Fs], M, Acc) ->
+ make_tab_1(Fs, M, [{F,M}|Acc]);
+make_tab_1([], _, Acc) -> reverse(Acc).
+
+
+visible_faces(#we{fs=Ftab}) ->
+ visible_faces_1(gb_trees:keys(Ftab)).
+
+visible_faces_1([F|Fs]) when F < 0 ->
+ visible_faces_1(Fs);
+visible_faces_1(Fs) -> Fs.
+
+remove_invisible([{F,_}|Fs]) when F < 0 ->
+ remove_invisible(Fs);
+remove_invisible(Fs) -> Fs.
+
+hide_faces_1([{F,_}=P|Fms], Ftab, Acc) when F < 0 ->
+ hide_faces_1(Fms, Ftab, [P|Acc]);
+hide_faces_1([{F,M}=P|Fms], Ftab, Acc) ->
+ case gb_trees:is_defined(F, Ftab) of
+ false -> hide_faces_1(Fms, Ftab, [{-F-1,M}|Acc]);
+ true -> hide_faces_1(Fms, Ftab, [P|Acc])
+ end;
+hide_faces_1([], _, Acc) -> sort(Acc).
+
+show_faces_1([{F,M}|Fms], Acc) when F < 0 ->
+ show_faces_1(Fms, [{-F-1,M}|Acc]);
+show_faces_1(Fs, Acc) -> sort(Acc++Fs).
+
+renumber_1([{F,M}|T], Fmap, Acc) ->
+ renumber_1(T, Fmap, [{gb_trees:get(F, Fmap),M}|Acc]);
+renumber_1([], _, Acc) -> sort(Acc).
+
+%% rev_compress([{Face,Mat}], [{Face,Mat}]) -> [{Face,Mat}] | Mat.
+%% Reverse just like lists:reverse/2, but if all materials
+%% turn out to be the same, return that material.
+rev_compress(L, Acc) ->
+ case same_mat(Acc) of
+ [] -> reverse(L, Acc);
+ M -> rev_compress_1(L, M, Acc)
+ end.
+
+rev_compress_1([{_,M}=E|T], M, Acc) ->
+ %% Same material.
+ rev_compress_1(T, M, [E|Acc]);
+rev_compress_1([_|_]=L, _, Acc) ->
+ %% Another material. Finish by using reverse/2.
+ reverse(L, Acc);
+rev_compress_1([], M, _) ->
+ %% All materials turned out to be the same.
+ M.
+
+%% compress(MaterialTab) -> [{Face,Mat}] | Mat.
+%% Compress a face mapping if possible.
+compress(M) when is_atom(M) -> M;
+compress(L) when is_list(L) ->
+ case same_mat(L) of
+ [] -> L;
+ M -> M
+ end.
+
+same_mat([]) -> [];
+same_mat([{_,M}|T]) -> same_mat_1(T, M).
+
+same_mat_1([{_,M}|T], M) -> same_mat_1(T, M);
+same_mat_1([], M) -> M;
+same_mat_1(_, _) -> [].
+
+used_materials_1([{_,M}|T], [M|_]=Acc) ->
+ used_materials_1(T, Acc);
+used_materials_1([{_,M}|T], Acc) ->
+ used_materials_1(T, [M|Acc]);
+used_materials_1([], Acc) ->
+ ordsets:from_list(Acc).
+
+mat_faces_1([{F1,_}|_]=Fs, [{F2,_}|Ms], Acc) when F2 < F1 ->
+ mat_faces_1(Fs, Ms, Acc);
+mat_faces_1([{F,Info}|Fs], [{F,Mat}|Ms], Acc) ->
+ mat_faces_1(Fs, Ms, [{Mat,{F,Info}}|Acc]);
+mat_faces_1([], _, Acc) -> wings_util:rel2fam(Acc).
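
The material mapping manipulated above is either a single material atom (when every face uses the same material) or an ordered [{Face,Material}] list, and compress/1 collapses the list form back to an atom whenever possible. A minimal standalone sketch of that compression idea, under a hypothetical module name (mat_sketch is not part of the test data):

    -module(mat_sketch).
    -export([compress/1]).

    %% Collapse a [{Face,Material}] mapping to a single material atom
    %% when every face maps to the same material; otherwise keep the list.
    compress(M) when is_atom(M) -> M;
    compress([]) -> [];
    compress([{_,M}|T]=L) ->
        case lists:all(fun({_,Mat}) -> Mat =:= M end, T) of
            true -> M;
            false -> L
        end.

For example, mat_sketch:compress([{1,gold},{2,gold}]) returns gold, while mat_sketch:compress([{1,gold},{2,steel}]) returns the list unchanged.
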
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_intl.hrl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_intl.hrl
new file mode 100644
index 0000000000..ebcb560f27
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_intl.hrl
@@ -0,0 +1,15 @@
+%%
+%% wings_intl.hrl --
+%%
+%% Defines for translations
+%%
+%% Copyright (c) 2001-2005 Bjorn Gustavsson
+%%
+%% See the file "license.terms" for information on usage and redistribution
+%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
+%%
+%% $Id: wings_intl.hrl,v 1.1 2009/01/25 18:55:33 kostis Exp $
+%%
+
+-define(STR(A,B,Str), wings_lang:str({?MODULE,A,B},Str)).
+-define(__(Key,Str), wings_lang:str({?MODULE,Key},Str)).
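
The two macros key every translatable string on the defining module plus an identifier and route the lookup through wings_lang:str, which is not included in this test data. Purely as an illustration, in a hypothetical module my_dialog that includes this header:

    %% ?__(1, "Open File")   expands to wings_lang:str({my_dialog,1}, "Open File")
    %% ?STR(menu, 2, "Save") expands to wings_lang:str({my_dialog,menu,2}, "Save")
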
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_io.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_io.erl
new file mode 100644
index 0000000000..39002c675d
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_io.erl
@@ -0,0 +1,37 @@
+%%
+%% wings_io.erl --
+%%
+%% This module contains most of the low-level GUI for Wings.
+%%
+
+-module(wings_io).
+
+-export([get_matching_events/1]).
+
+-define(EVENT_QUEUE, wings_io_event_queue).
+
+%%%
+%%% Input.
+%%%
+
+get_matching_events(Filter) ->
+ Eq = get(?EVENT_QUEUE),
+ get_matching_events_1(Filter, Eq, [], []).
+
+get_matching_events_1(Filter, Eq0, Match, NoMatch) ->
+ case queue:out(Eq0) of
+ {{value,Ev},Eq} ->
+ case Filter(Ev) of
+ false ->
+ get_matching_events_1(Filter, Eq, Match, [Ev|NoMatch]);
+ true ->
+ get_matching_events_1(Filter, Eq, [Ev|Match], NoMatch)
+ end;
+ {empty,{In,Out}} ->
+ case Match of
+ [] -> [];
+ _ ->
+ put(?EVENT_QUEUE, {In, lists:reverse(NoMatch, Out)}),
+ Match
+ end
+ end.
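
get_matching_events/1 reads a queue() from the process dictionary, partitions its events with the supplied filter, and only writes the queue back, with the matching events removed, when at least one event matched. Note that the {In,Out} match on the empty queue and the tuple it puts back touch the internal representation of the queue() opaque type, which is what this opaque-test data exercises. Illustrative only, assuming the dictionary key defined by the macro above:

    %% Seed the process-dictionary queue and pull out the even events.
    seed_and_filter() ->
        put(wings_io_event_queue, queue:from_list([1, 2, 3, 4])),
        wings_io:get_matching_events(fun(E) -> E rem 2 =:= 0 end).
        %% Returns [4,2]; the queue left in the dictionary now holds
        %% only the non-matching events 1 and 3.
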
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_sel.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_sel.erl
new file mode 100644
index 0000000000..eef797027e
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_sel.erl
@@ -0,0 +1,68 @@
+%%
+%% wings_sel.erl --
+%%
+%% This module implements selection utilities.
+%%
+
+-module(wings_sel).
+
+-export([face_regions/2, fold/3, set/3]).
+
+-include("wings.hrl").
+
+set(Mode, Sel, St) ->
+ St#st{selmode=Mode, sel=lists:sort(Sel), sh=false}.
+
+%%%
+%%% Fold over the selection.
+%%%
+
+fold(F, Acc, #st{sel=Sel,shapes=Shapes}) ->
+ fold_1(F, Acc, Shapes, Sel).
+
+fold_1(F, Acc0, Shapes, [{Id,Items}|T]) ->
+ We = gb_trees:get(Id, Shapes),
+ ?ASSERT(We#we.id =:= Id),
+ fold_1(F, F(Items, We, Acc0), Shapes, T);
+fold_1(_F, Acc, _Shapes, []) -> Acc.
+
+%%%
+%%% Divide the face selection into regions where each face shares at least
+%%% one edge with another face in the same region. Two faces can share a
+%%% vertex without necessarily being in the same region.
+%%%
+
+face_regions(Faces, We) when is_list(Faces) ->
+ face_regions_1(gb_sets:from_list(Faces), We);
+face_regions(Faces, We) ->
+ face_regions_1(Faces, We).
+
+face_regions_1(Faces, We) ->
+ find_face_regions(Faces, We, fun collect_face_fun/5, []).
+
+find_face_regions(Faces0, We, Coll, Acc) ->
+ case gb_sets:is_empty(Faces0) of
+ true -> Acc;
+ false ->
+ {Face,Faces1} = gb_sets:take_smallest(Faces0),
+ Ws = [Face],
+ {Reg,Faces} = collect_face_region(Ws, We, Coll, [], Faces1),
+ find_face_regions(Faces, We, Coll, [Reg|Acc])
+ end.
+
+collect_face_region([_|_]=Ws0, We, Coll, Reg0, Faces0) ->
+ Reg = Ws0++Reg0,
+ {Ws,Faces} = wings_face:fold_faces(Coll, {[],Faces0}, Ws0, We),
+ collect_face_region(Ws, We, Coll, Reg, Faces);
+collect_face_region([], _, _, Reg, Faces) ->
+ {gb_sets:from_list(Reg),Faces}.
+
+collect_face_fun(Face, _, _, Rec, {Ws,Faces}=A) ->
+ Of = case Rec of
+ #edge{lf=Face,rf=Of0} -> Of0;
+ #edge{rf=Face,lf=Of0} -> Of0
+ end,
+ case gb_sets:is_member(Of, Faces) of
+ true -> {[Of|Ws],gb_sets:delete(Of, Faces)};
+ false -> A
+ end.
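
face_regions/2 grows regions by repeatedly taking the smallest remaining face and absorbing the neighbours that share an edge with it, via wings_face:fold_faces/4 (not part of this test data). A self-contained sketch of the same region-growing idea over a plain adjacency map; the module name and data layout are hypothetical:

    -module(regions_sketch).
    -export([regions/2]).

    %% Adj maps each face to the faces it shares an edge with.
    %% Returns one gb_set per edge-connected region.
    regions(Faces, Adj) ->
        find_regions(gb_sets:from_list(Faces), Adj, []).

    find_regions(Faces0, Adj, Acc) ->
        case gb_sets:is_empty(Faces0) of
            true -> Acc;
            false ->
                {Face,Faces1} = gb_sets:take_smallest(Faces0),
                {Reg,Faces} = grow([Face], Adj, [], Faces1),
                find_regions(Faces, Adj, [Reg|Acc])
        end.

    grow([F|Ws], Adj, Reg, Faces0) ->
        %% Unvisited neighbours of F join the work list and leave Faces0.
        Ns = [N || N <- maps:get(F, Adj, []), gb_sets:is_member(N, Faces0)],
        Faces = lists:foldl(fun gb_sets:delete/2, Faces0, Ns),
        grow(Ns ++ Ws, Adj, [F|Reg], Faces);
    grow([], _Adj, Reg, Faces) ->
        {gb_sets:from_list(Reg), Faces}.
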
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_shape.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_shape.erl
new file mode 100644
index 0000000000..0df8ca68eb
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_shape.erl
@@ -0,0 +1,69 @@
+%%
+%% wings_shape.erl --
+%%
+%% Utilities for shape records.
+%%
+
+-module(wings_shape).
+
+-export([insert/3]).
+
+-include("wings.hrl").
+
+%%%
+%%% Exported functions.
+%%%
+
+%% insert(We, Suffix, St0) -> St.
+%% Suffix = cut | clone | copy | extract | mirror | sep
+%%
+%% Create a new object based on an old object. The name
+%% will be created from the old name (with digits and known
+%% suffixes stripped) with the given Suffix and a number
+%% appended.
+insert(#we{name=OldName}=We0, Suffix, #st{shapes=Shapes0,onext=Oid}=St) ->
+ Name = new_name(OldName, Suffix, Oid),
+ We = We0#we{id=Oid,name=Name},
+ Shapes = gb_trees:insert(Oid, We, Shapes0),
+ St#st{shapes=Shapes,onext=Oid+1}.
+
+%%%
+%%% Local functions follow.
+%%%
+
+new_name(OldName, Suffix0, Id) ->
+ Suffix = suffix(Suffix0),
+ Base = base(lists:reverse(OldName)),
+ lists:reverse(Base, "_" ++ Suffix ++ integer_to_list(Id)).
+
+%% Note: Filename suffixes are intentionally not translated.
+%% If we are to translate them in the future, base/1 below
+%% must be updated to strip suffixes (both for the current language
+%% and for English).
+
+suffix(cut) -> "cut";
+suffix(clone) -> "clone";
+suffix(copy) -> "copy";
+suffix(extract) -> "extract";
+suffix(mirror) -> "mirror";
+suffix(sep) -> "sep".
+
+%% base(ReversedName) -> ReversedBaseName
+%% Given an object name, strip digits and known suffixes to
+%% create a base name. Returns the unchanged name if
+%% no known suffix could be stripped.
+
+base(OldName) ->
+ case base_1(OldName) of
+ error -> OldName;
+ Base -> Base
+ end.
+
+base_1([H|T]) when $0 =< H, H =< $9 -> base_1(T);
+base_1("tuc_"++Base) -> Base; %"_cut"
+base_1("enolc_"++Base) -> Base; %"_clone"
+base_1("ypoc_"++Base) -> Base; %"_copy"
+base_1("tcartxe_"++Base) -> Base; %"_extract"
+base_1("rorrim_"++Base) -> Base; %"_mirror"
+base_1("pes_"++Base) -> Base; %"_sep"
+base_1(_Base) -> error.
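
Since new_name/3 and base/1 work on the reversed string, a short trace may help (names and numbers chosen for illustration):

    %% insert/3 with OldName = "Cube_cut1", Suffix = copy, Oid = 7:
    %%   lists:reverse("Cube_cut1")              -> "1tuc_ebuC"
    %%   base_1/1 strips the digit, then "tuc_"  -> "ebuC"
    %%   lists:reverse("ebuC", "_copy7")         -> "Cube_copy7"
    %% With OldName = "Sphere" nothing can be stripped, so the result
    %% is simply "Sphere_copy7".
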
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_util.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_util.erl
new file mode 100644
index 0000000000..9572e19955
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_util.erl
@@ -0,0 +1,39 @@
+%%
+%% wings_util.erl --
+%%
+%% Various utility functions that do not obviously fit anywhere else.
+%%
+
+-module(wings_util).
+
+-export([gb_trees_smallest_key/1, gb_trees_largest_key/1,
+ gb_trees_map/2, rel2fam/1]).
+
+-include("wings.hrl").
+
+rel2fam(Rel) ->
+ sofs:to_external(sofs:relation_to_family(sofs:relation(Rel))).
+
+%% a definition that does not violate the opaqueness of gb_tree()
+gb_trees_smallest_key(Tree) ->
+ {Key, _V} = gb_trees:smallest(Tree),
+ Key.
+
+%% a definition that violates the opaqueness of gb_tree()
+gb_trees_largest_key({_, Tree}) ->
+ largest_key1(Tree).
+
+largest_key1({Key, _Value, _Smaller, nil}) ->
+ Key;
+largest_key1({_Key, _Value, _Smaller, Larger}) ->
+ largest_key1(Larger).
+
+gb_trees_map(F, {Size,Tree}) ->
+ {Size,gb_trees_map_1(F, Tree)}.
+
+gb_trees_map_1(_, nil) -> nil;
+gb_trees_map_1(F, {K,V,Smaller,Larger}) ->
+ {K,F(K, V),
+ gb_trees_map_1(F, Smaller),
+ gb_trees_map_1(F, Larger)}.
+
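
gb_trees_largest_key/1 and gb_trees_map/2 deliberately pattern-match the internal {Size,TreeNode} representation, in contrast to gb_trees_smallest_key/1 which stays within the documented API; that contrast is the point of this file. For reference, a sketch of an API-respecting largest-key helper (gb_trees:largest/1 returns {Key,Value} for a non-empty tree):

    gb_trees_largest_key_safe(Tree) ->
        {Key, _V} = gb_trees:largest(Tree),
        Key.
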
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_we.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_we.erl
new file mode 100644
index 0000000000..d782144def
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/wings/wings_we.erl
@@ -0,0 +1,250 @@
+%%
+%% wings_we.erl --
+%%
+%% This module contains functions to build and manipulate
+%% we records (winged-edge records, the central data structure
+%% in Wings 3D).
+
+-module(wings_we).
+
+-export([rebuild/1, is_consistent/1, is_face_consistent/2, new_id/1,
+ new_items_as_ordset/3, validate_mirror/1, visible/1, visible_edges/1]).
+
+-include("wings.hrl").
+
+%%%
+%%% API.
+%%%
+
+validate_mirror(#we{mirror=none}=We) -> We;
+validate_mirror(#we{fs=Ftab,mirror=Face}=We) ->
+ case gb_trees:is_defined(Face, Ftab) of
+ false -> We#we{mirror=none};
+ true -> We
+ end.
+
+%% rebuild(We) -> We'
+%% Rebuild any missing 'vc' and 'fs' tables. If there are
+%% fewer elements in the 'vc' table than in the 'vp' table,
+%% remove redundant entries in the 'vp' table. Update the
+%% id bounds.
+rebuild(#we{vc=undefined,fs=undefined,es=Etab0}=We0) ->
+ Etab = gb_trees:to_list(Etab0),
+ Ftab = rebuild_ftab(Etab),
+ VctList = rebuild_vct(Etab),
+ We = We0#we{vc=gb_trees:from_orddict(VctList),fs=Ftab},
+ rebuild_1(VctList, We);
+rebuild(#we{vc=undefined,es=Etab}=We) ->
+ VctList = rebuild_vct(gb_trees:to_list(Etab), []),
+ rebuild_1(VctList, We#we{vc=gb_trees:from_orddict(VctList)});
+rebuild(#we{fs=undefined,es=Etab}=We) ->
+ Ftab = rebuild_ftab(gb_trees:to_list(Etab)),
+ rebuild(We#we{fs=Ftab});
+rebuild(We) -> update_id_bounds(We).
+
+%%% Utilities for allocating IDs.
+
+new_id(#we{next_id=Id}=We) ->
+ {Id,We#we{next_id=Id+1}}.
+
+%%% Returns sets of newly created items.
+
+new_items_as_ordset(vertex, #we{next_id=Wid}, #we{next_id=NewWid,vp=Tab}) ->
+ new_items_as_ordset_1(Tab, Wid, NewWid);
+new_items_as_ordset(edge, #we{next_id=Wid}, #we{next_id=NewWid,es=Tab}) ->
+ new_items_as_ordset_1(Tab, Wid, NewWid);
+new_items_as_ordset(face, #we{next_id=Wid}, #we{next_id=NewWid,fs=Tab}) ->
+ new_items_as_ordset_1(Tab, Wid, NewWid).
+
+any_hidden(#we{fs=Ftab}) ->
+ not gb_trees:is_empty(Ftab) andalso
+ wings_util:gb_trees_smallest_key(Ftab) < 0.
+
+%%%
+%%% Local functions.
+%%%
+
+rebuild_1(VctList, #we{vc=Vct,vp=Vtab0}=We) ->
+ case {gb_trees:size(Vct),gb_trees:size(Vtab0)} of
+ {Same,Same} -> rebuild(We);
+ {Sz1,Sz2} when Sz1 < Sz2 ->
+ Vtab = vertex_gc_1(VctList, gb_trees:to_list(Vtab0), []),
+ rebuild(We#we{vp=Vtab})
+ end.
+
+rebuild_vct(Es) ->
+ rebuild_vct(Es, []).
+
+rebuild_vct([{Edge,#edge{vs=Va,ve=Vb}}|Es], Acc0) ->
+ Acc = rebuild_maybe_add(Va, Vb, Edge, Acc0),
+ rebuild_vct(Es, Acc);
+rebuild_vct([], VtoE) ->
+ build_incident_tab(VtoE).
+
+rebuild_ftab(Es) ->
+ rebuild_ftab_1(Es, []).
+
+rebuild_ftab_1([{Edge,#edge{lf=Lf,rf=Rf}}|Es], Acc0) ->
+ Acc = rebuild_maybe_add(Lf, Rf, Edge, Acc0),
+ rebuild_ftab_1(Es, Acc);
+rebuild_ftab_1([], FtoE) ->
+ gb_trees:from_orddict(build_incident_tab(FtoE)).
+
+rebuild_maybe_add(Ka, Kb, E, [_,{Ka,_}|_]=Acc) ->
+ [{Kb,E}|Acc];
+rebuild_maybe_add(Ka, Kb, E, [_,{Kb,_}|_]=Acc) ->
+ [{Ka,E}|Acc];
+rebuild_maybe_add(Ka, Kb, E, [{Ka,_}|_]=Acc) ->
+ [{Kb,E}|Acc];
+rebuild_maybe_add(Ka, Kb, E, [{Kb,_}|_]=Acc) ->
+ [{Ka,E}|Acc];
+rebuild_maybe_add(Ka, Kb, E, Acc) ->
+ [{Ka,E},{Kb,E}|Acc].
+
+vertex_gc_1([{V,_}|Vct], [{V,_}=Vtx|Vpos], Acc) ->
+ vertex_gc_1(Vct, Vpos, [Vtx|Acc]);
+vertex_gc_1([_|_]=Vct, [_|Vpos], Acc) ->
+ vertex_gc_1(Vct, Vpos, Acc);
+vertex_gc_1([], _, Acc) ->
+ gb_trees:from_orddict(lists:reverse(Acc)).
+
+%%%
+%%% Handling of hidden faces.
+%%%
+
+visible(#we{mirror=none,fs=Ftab}) ->
+ visible_2(gb_trees:keys(Ftab));
+visible(#we{mirror=Face,fs=Ftab}) ->
+ visible_2(gb_trees:keys(gb_trees:delete(Face, Ftab))).
+
+visible_2([F|Fs]) when F < 0 -> visible_2(Fs);
+visible_2(Fs) -> Fs.
+
+visible_edges(#we{es=Etab,mirror=Face}=We) ->
+ case any_hidden(We) of
+ false -> gb_trees:keys(Etab);
+ true -> visible_es_1(gb_trees:to_list(Etab), Face, [])
+ end.
+
+visible_es_1([{E,#edge{lf=Lf,rf=Rf}}|Es], Face, Acc) ->
+ if
+ Lf < 0 ->
+ %% Left face hidden.
+ if
+ Rf < 0; Rf =:= Face ->
+ %% Both faces invisible (in some way).
+ visible_es_1(Es, Face, Acc);
+ true ->
+ %% Right face is visible.
+ visible_es_1(Es, Face, [E|Acc])
+ end;
+ Lf =:= Face, Rf < 0 ->
+ %% Left face mirror, right face hidden.
+ visible_es_1(Es, Face, Acc);
+ true ->
+ %% At least one face visible.
+ visible_es_1(Es, Face, [E|Acc])
+ end;
+visible_es_1([], _, Acc) -> ordsets:from_list(Acc).
+
+update_id_bounds(#we{vp=Vtab,es=Etab,fs=Ftab}=We) ->
+ case gb_trees:is_empty(Etab) of
+ true -> We#we{next_id=0};
+ false ->
+ LastId = lists:max([wings_util:gb_trees_largest_key(Vtab),
+ wings_util:gb_trees_largest_key(Etab),
+ wings_util:gb_trees_largest_key(Ftab)]),
+ We#we{next_id=LastId+1}
+ end.
+
+%% build_incident_tab([{Elem,Edge}]) -> [{Elem,Edge}]
+%% Elem = Face or Vertex
+%% Build the table of incident edges for either faces or vertices.
+%% Returns an ordered list where each Elem is unique.
+
+build_incident_tab(ElemToEdgeRel) ->
+ T = ets:new(?MODULE, [ordered_set]),
+ ets:insert(T, ElemToEdgeRel),
+ R = ets:tab2list(T),
+ ets:delete(T),
+ R.
+
+%%%
+%%% Calculate normals.
+%%%
+
+new_items_as_ordset_1(Tab, Wid, NewWid) when NewWid-Wid < 32 ->
+ new_items_as_ordset_2(Wid, NewWid, Tab, []);
+new_items_as_ordset_1(Tab, Wid, _NewWid) ->
+ [Item || Item <- gb_trees:keys(Tab), Item >= Wid].
+
+new_items_as_ordset_2(Wid, NewWid, Tab, Acc) when Wid < NewWid ->
+ case gb_trees:is_defined(Wid, Tab) of
+ true -> new_items_as_ordset_2(Wid+1, NewWid, Tab, [Wid|Acc]);
+ false -> new_items_as_ordset_2(Wid+1, NewWid, Tab, Acc)
+ end;
+new_items_as_ordset_2(_Wid, _NewWid, _Tab, Acc) -> lists:reverse(Acc).
+
+%%%
+%%% Test the consistency of a #we{}.
+%%%
+
+is_consistent(#we{}=We) ->
+ try
+ validate_vertex_tab(We),
+ validate_faces(We)
+ catch error:_ -> false
+ end.
+
+is_face_consistent(Face, #we{fs=Ftab,es=Etab}) ->
+ Edge = gb_trees:get(Face, Ftab),
+ try validate_face(Face, Edge, Etab)
+ catch error:_ -> false
+ end.
+
+validate_faces(#we{fs=Ftab,es=Etab}) ->
+ validate_faces_1(gb_trees:to_list(Ftab), Etab).
+
+validate_faces_1([{Face,Edge}|Fs], Etab) ->
+ validate_face(Face, Edge, Etab),
+ validate_faces_1(Fs, Etab);
+validate_faces_1([], _) -> true.
+
+validate_face(Face, Edge, Etab) ->
+ Ccw = walk_face_ccw(Edge, Etab, Face, Edge, []),
+ Edge = walk_face_cw(Edge, Etab, Face, Ccw),
+ [V|Vs] = lists:sort(Ccw),
+ validate_face_vertices(Vs, V).
+
+validate_face_vertices([V|_], V) ->
+ erlang:error(repeated_vertex);
+validate_face_vertices([_], _) ->
+ true;
+validate_face_vertices([V|Vs], _) ->
+ validate_face_vertices(Vs, V).
+
+walk_face_ccw(LastEdge, _, _, LastEdge, [_|_]=Acc) -> Acc;
+walk_face_ccw(Edge, Etab, Face, LastEdge, Acc) ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{ve=V,lf=Face,ltpr=Next} ->
+ walk_face_ccw(Next, Etab, Face, LastEdge, [V|Acc]);
+ #edge{vs=V,rf=Face,rtpr=Next} ->
+ walk_face_ccw(Next, Etab, Face, LastEdge, [V|Acc])
+ end.
+
+walk_face_cw(Edge, _, _, []) -> Edge;
+walk_face_cw(Edge, Etab, Face, [V|Vs]) ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{vs=V,lf=Face,ltsu=Next} ->
+ walk_face_cw(Next, Etab, Face, Vs);
+ #edge{ve=V,rf=Face,rtsu=Next} ->
+ walk_face_cw(Next, Etab, Face, Vs)
+ end.
+
+validate_vertex_tab(#we{es=Etab,vc=Vct}) ->
+ lists:foreach(fun({V,Edge}) ->
+ case gb_trees:get(Edge, Etab) of
+ #edge{vs=V} -> ok;
+ #edge{ve=V} -> ok
+ end
+ end, gb_trees:to_list(Vct)).
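
A small worked example of the hidden-face convention used by visible/1 and visible_2/1 above (face numbers below zero denote hidden faces; the face table contents are chosen for illustration):

    %% With gb_trees:keys(Ftab) = [-2,-1,0,1,2] and mirror = none:
    %%   visible(We) -> [0,1,2]    %% the negative (hidden) ids are skipped
    %% With the same Ftab and mirror = 1:
    %%   visible(We) -> [0,2]      %% the mirror face is dropped as well
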
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis1.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis1.erl
new file mode 100644
index 0000000000..82bcf2edcf
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis1.erl
@@ -0,0 +1,14 @@
+-module(zoltan_kis1).
+
+-export([f/0, gen/0]).
+
+-opaque id() :: string().
+
+-spec f() -> integer().
+
+%% BIF and unification (t_unify) issue
+f() -> erlang:length(gen()).
+
+-spec gen() -> id().
+
+gen() -> "Dummy".
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis2.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis2.erl
new file mode 100644
index 0000000000..3a269622fd
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis2.erl
@@ -0,0 +1,14 @@
+-module(zoltan_kis2).
+
+-export([get/2]).
+
+-opaque data() :: gb_tree().
+
+-spec get(term(), data()) -> term().
+
+get(Key, Data) ->
+ %%Should unopaque data for remote calls
+ case gb_trees:lookup(Key, Data) of
+ 'none' -> 'undefined';
+ {'value', Val} -> Val
+ end.
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis3.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis3.erl
new file mode 100644
index 0000000000..d92c6766ff
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis3.erl
@@ -0,0 +1,14 @@
+-module(zoltan_kis3).
+
+-export([f/0, gen/0]).
+
+-opaque id() :: string().
+
+-spec f() -> char().
+
+%%List pattern matching issue
+f() -> [H|_T] = gen(), H.
+
+-spec gen() -> id().
+
+gen() -> "Dummy".
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis4.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis4.erl
new file mode 100644
index 0000000000..aa1a4abcb7
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis4.erl
@@ -0,0 +1,14 @@
+-module(zoltan_kis4).
+
+-export([f/0, gen/0]).
+
+-opaque id() :: string().
+
+-spec f() -> boolean().
+
+%%Equality test issue
+f() -> "Dummy" == gen().
+
+-spec gen() -> id().
+
+gen() -> "Dummy".
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis5.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis5.erl
new file mode 100644
index 0000000000..30cebf806a
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis5.erl
@@ -0,0 +1,14 @@
+-module(zoltan_kis5).
+
+-export([f/0, gen/0]).
+
+-opaque id() :: string().
+
+-spec f() -> boolean().
+
+%% Equality test issue
+f() -> "Dummy" == gen().
+
+-spec gen() -> id().
+
+gen() -> "Dummy".
diff --git a/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis6.erl b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis6.erl
new file mode 100644
index 0000000000..6f0779d7d1
--- /dev/null
+++ b/lib/dialyzer/test/opaque_tests_SUITE_data/src/zoltan_kis6.erl
@@ -0,0 +1,14 @@
+-module(zoltan_kis6).
+
+-export([f/0, gen/0]).
+
+-opaque id() :: {integer(),atom()}.
+
+%%-spec f() -> id().
+
+%% Tuple Unification (t_unify) issue
+f() -> {X,Y} = gen().
+
+-spec gen() -> id().
+
+gen() -> {34, leprecon}.
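
The zoltan_kis modules above all poke at a locally declared -opaque type in different ways (a BIF call, a remote gb_trees call, list and tuple pattern matching, equality tests) to exercise Dialyzer's opaque-type handling. For contrast, a sketch of the conventional way to keep an opaque type abstract, under a hypothetical module name; callers go through the exported accessors instead of inspecting the representation:

    -module(id_box).
    -export([new/1, len/1, equal/2]).
    -export_type([id/0]).

    -opaque id() :: string().

    -spec new(string()) -> id().
    new(S) -> S.

    -spec len(id()) -> non_neg_integer().
    len(Id) -> length(Id).

    -spec equal(id(), id()) -> boolean().
    equal(A, B) -> A =:= B.
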
diff --git a/lib/dialyzer/test/options1_tests_SUITE.erl b/lib/dialyzer/test/options1_tests_SUITE.erl
new file mode 100644
index 0000000000..02cafe6c5f
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE.erl
@@ -0,0 +1,54 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(options1_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([options1_tests_SUITE_consistency/1, compiler/1]).
+
+suite() ->
+ [{timetrap, {minutes, 30}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, [{include_dirs,["my_include"]},
+ {defines,[{'COMPILER_VSN',42}]},
+ {warnings,[no_improper_lists]}]}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [options1_tests_SUITE_consistency,compiler].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+options1_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+compiler(Config) ->
+ case dialyze(Config, compiler) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/options1_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..c612e77d3e
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/dialyzer_options
@@ -0,0 +1,2 @@
+{dialyzer_options, [{include_dirs, ["my_include"]}, {defines, [{'COMPILER_VSN', 42}]}, {warnings, [no_improper_lists]}]}.
+{time_limit, 30}.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Entries b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Entries
new file mode 100644
index 0000000000..513d4a315a
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Entries
@@ -0,0 +1,3 @@
+/erl_bits.hrl/1.1/Wed Dec 17 09:53:40 2008//
+/erl_compile.hrl/1.1/Wed Dec 17 09:53:40 2008//
+D
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Repository b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Repository
new file mode 100644
index 0000000000..1c6511fec3
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Repository
@@ -0,0 +1 @@
+dialyzer_tests/option_tests/compiler/my_include
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Root b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Root
new file mode 100644
index 0000000000..f6cdd6158b
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/CVS/Root
@@ -0,0 +1 @@
+:pserver:[email protected]:/hipe
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_bits.hrl b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_bits.hrl
new file mode 100644
index 0000000000..96d5cec268
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_bits.hrl
@@ -0,0 +1,43 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.0, (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.erlang.org/EPL1_0.txt
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is Erlang-4.7.3, December, 1998.
+%%
+%% The Initial Developer of the Original Code is Ericsson Telecom
+%% AB. Portions created by Ericsson are Copyright (C), 1998, Ericsson
+%% Telecom AB. All Rights Reserved.
+%%
+%% Contributor(s): ______________________________________.''
+%%
+%% This is an -*- erlang -*- file.
+%% Generic compiler options, passed from the erl_compile module.
+
+-record(bittype, {
+ type, %% integer/float/binary
+ unit, %% element unit
+ sign, %% signed/unsigned
+ endian %% big/little
+ }).
+
+-record(bitdefault, {
+ integer, %% default type for integer
+ float, %% default type for float
+ binary %% default type for binary
+ }).
+
+%%% (From config.hrl in the bitsyntax branch.)
+-define(SYS_ENDIAN, big).
+-define(SIZEOF_CHAR, 1).
+-define(SIZEOF_DOUBLE, 8).
+-define(SIZEOF_FLOAT, 4).
+-define(SIZEOF_INT, 4).
+-define(SIZEOF_LONG, 4).
+-define(SIZEOF_LONG_LONG, 8).
+-define(SIZEOF_SHORT, 2).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_compile.hrl b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_compile.hrl
new file mode 100644
index 0000000000..ef2b68ac9a
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/my_include/erl_compile.hrl
@@ -0,0 +1,42 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: erl_compile.hrl,v 1.1 2008/12/17 09:53:40 mikpe Exp $
+%%
+
+%% Generic compiler options, passed from the erl_compile module.
+
+-record(options,
+ {includes=[], % Include paths (list of absolute
+ % directory names).
+ outdir=".", % Directory for result (absolute
+ % path).
+ output_type=undefined, % Type of output file (atom).
+ defines=[], % Preprocessor defines. Each
+ % element is an atom (the name to
+ % define), or a {Name, Value}
+ % tuple.
+ warning=1, % Warning level (0 - no
+ % warnings, 1 - standard level,
+ % 2, 3, ... - more warnings).
+ verbose=false, % Verbose (true/false).
+ optimize=999, % Optimize options.
+ specific=[], % Compiler specific options.
+ outfile="", % Name of output file (internal
+ % use in erl_compile.erl).
+ cwd % Current working directory
+ % for erlc.
+ }).
+
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/results/compiler b/lib/dialyzer/test/options1_tests_SUITE_data/results/compiler
new file mode 100644
index 0000000000..924ef389df
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/results/compiler
@@ -0,0 +1,35 @@
+
+beam_asm.erl:32: The pattern {'error', Error} can never match the type <<_:64,_:_*8>>
+beam_bool.erl:193: The pattern {[], _} can never match the type {[{_,_,_,_},...],[any()]}
+beam_bool.erl:510: The pattern [{'set', [Dst], _, _}, {'%live', _}] can never match the type [{_,_,_,_}]
+beam_disasm.erl:537: The variable X can never match since previous clauses completely covered the type 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
+beam_type.erl:284: The pattern <'pi', 0> can never match the type <_,1 | 2>
+beam_validator.erl:396: The pattern <{'jump', {'f', _}}, Vst = {'vst', 'none', _}> can never match the type <_,#vst{current::#st{ct::[]}}>
+beam_validator.erl:690: The pattern <'term', OldT> can never match the type <{'tuple',[any(),...]},_>
+beam_validator.erl:692: Clause guard cannot succeed. The pattern <NewT = {Type, New}, OldT = {_, Old}> was matched against the type <{'tuple',[any(),...]},_>
+beam_validator.erl:699: Clause guard cannot succeed. The pattern <NewT = {Type, _}, 'number'> was matched against the type <{'tuple',[any(),...]},_>
+beam_validator.erl:702: The pattern <'number', OldT = {Type, _}> can never match the type <{'tuple',[any(),...]},_>
+beam_validator.erl:705: The pattern <'bool', {'atom', A}> can never match the type <{'tuple',[any(),...]},_>
+beam_validator.erl:707: The pattern <{'atom', A}, 'bool'> can never match the type <{'tuple',[any(),...]},_>
+beam_validator.erl:713: Guard test is_integer(Sz::[any(),...]) can never succeed
+beam_validator.erl:727: Function upgrade_bool/1 will never be called
+cerl_inline.erl:190: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:219: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:230: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:2333: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:2355: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:238: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:2436: Function filename/1 will never be called
+cerl_inline.erl:2700: The pattern 'true' can never match the type 'false'
+cerl_inline.erl:2730: The pattern <{F, L, D}, Vs> can never match the type <[1..255,...],[any()]>
+cerl_inline.erl:2738: The pattern <{F, L, D}, Vs> can never match the type <[1..255,...],[any()]>
+cerl_inline.erl:2750: The pattern <{[], L, D}, Vs> can never match the type <[1..255,...],[any()]>
+cerl_inline.erl:2752: The pattern <{[], _L, D}, Vs> can never match the type <[1..255,...],[any()]>
+cerl_inline.erl:2754: The pattern <{F, L, D}, Vs> can never match the type <[1..255,...],[any()]>
+cerl_inline.erl:2756: The pattern <{F, _L, D}, Vs> can never match the type <[1..255,...],[any()]>
+compile.erl:788: The pattern {'error', Es} can never match the type {'ok',<<_:64,_:_*8>>}
+core_lint.erl:473: The pattern <{'c_atom', _, 'all'}, 'binary', _Def, St> can never match the type <_,#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}},tl::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}}},[any()],_>
+core_lint.erl:505: The pattern <_Req, 'unknown', St> can never match the type <non_neg_integer(),non_neg_integer(),_>
+v3_codegen.erl:1569: The call v3_codegen:load_reg_1(V::any(),I::0,Rs::any(),pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
+v3_codegen.erl:1571: The call v3_codegen:load_reg_1(V::any(),I::0,[],pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
+v3_core.erl:646: The pattern <Prim = {'iprimop', _, _, _}, St> can never match the type <#c_nil{anno::[any(),...]} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple' | 'c_var' | 'ibinary' | 'icatch' | 'ireceive1',[any(),...] | {_,_,_,_},_} | #c_cons{anno::[any(),...]} | #c_fname{anno::[any(),...]} | #iletrec{anno::{_,_,_,_},defs::[any(),...],body::[any(),...]} | #icase{anno::{_,_,_,_},args::[any()],clauses::[any()],fc::{_,_,_,_,_,_}} | #ireceive2{anno::{_,_,_,_},clauses::[any()],action::[any()]} | #ifun{anno::{_,_,_,_},id::[any(),...],vars::[any()],clauses::[any(),...],fc::{_,_,_,_,_,_}} | #imatch{anno::{_,_,_,_},guard::[],fc::{_,_,_,_,_,_}} | #itry{anno::{_,_,_,_},args::[any()],vars::[any(),...],body::[any(),...],evars::[any(),...],handler::[any(),...]},_>
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_asm.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_asm.erl
new file mode 100644
index 0000000000..c2d9edcaa7
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_asm.erl
@@ -0,0 +1,358 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_asm.erl,v 1.1 2008/12/17 09:53:40 mikpe Exp $
+%%
+%% Purpose : Assembler for threaded Beam.
+
+-module(beam_asm).
+
+-export([module/4,format_error/1]).
+-export([encode/2]).
+
+-import(lists, [map/2,member/2,keymember/3,duplicate/2]).
+-include("beam_opcodes.hrl").
+
+-define(bs_aligned, 1).
+
+module(Code, Abst, SourceFile, Opts) ->
+ case assemble(Code, Abst, SourceFile, Opts) of
+ {error, Error} ->
+ {error, [{none, ?MODULE, Error}]};
+ Bin when binary(Bin) ->
+ {ok, Bin}
+ end.
+
+format_error({crashed, Why}) ->
+ io_lib:format("beam_asm_int: EXIT: ~p", [Why]).
+
+assemble({Mod,Exp,Attr,Asm,NumLabels}, Abst, SourceFile, Opts) ->
+ {1,Dict0} = beam_dict:atom(Mod, beam_dict:new()),
+ NumFuncs = length(Asm),
+ {Code,Dict1} = assemble_1(Asm, Exp, Dict0, []),
+ build_file(Code, Attr, Dict1, NumLabels, NumFuncs, Abst, SourceFile, Opts).
+
+assemble_1([{function,Name,Arity,Entry,Asm}|T], Exp, Dict0, Acc) ->
+ Dict1 = case member({Name,Arity}, Exp) of
+ true ->
+ beam_dict:export(Name, Arity, Entry, Dict0);
+ false ->
+ beam_dict:local(Name, Arity, Entry, Dict0)
+ end,
+ {Code, Dict2} = assemble_function(Asm, Acc, Dict1),
+ assemble_1(T, Exp, Dict2, Code);
+assemble_1([], _Exp, Dict0, Acc) ->
+ {IntCodeEnd,Dict1} = make_op(int_code_end, Dict0),
+ {list_to_binary(lists:reverse(Acc, [IntCodeEnd])),Dict1}.
+
+assemble_function([H|T], Acc, Dict0) ->
+ {Code, Dict} = make_op(H, Dict0),
+ assemble_function(T, [Code| Acc], Dict);
+assemble_function([], Code, Dict) ->
+ {Code, Dict}.
+
+build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
+ %% Create the code chunk.
+
+ CodeChunk = chunk(<<"Code">>,
+ <<16:32,
+ (beam_opcodes:format_number()):32,
+ (beam_dict:highest_opcode(Dict)):32,
+ NumLabels:32,
+ NumFuncs:32>>,
+ Code),
+
+ %% Create the atom table chunk.
+
+ {NumAtoms, AtomTab} = beam_dict:atom_table(Dict),
+ AtomChunk = chunk(<<"Atom">>, <<NumAtoms:32>>, AtomTab),
+
+ %% Create the import table chunk.
+
+ {NumImps, ImpTab0} = beam_dict:import_table(Dict),
+ Imp = flatten_imports(ImpTab0),
+ ImportChunk = chunk(<<"ImpT">>, <<NumImps:32>>, Imp),
+
+ %% Create the export table chunk.
+
+ {NumExps, ExpTab0} = beam_dict:export_table(Dict),
+ Exp = flatten_exports(ExpTab0),
+ ExpChunk = chunk(<<"ExpT">>, <<NumExps:32>>, Exp),
+
+ %% Create the local function table chunk.
+
+ {NumLocals, Locals} = beam_dict:local_table(Dict),
+ Loc = flatten_exports(Locals),
+ LocChunk = chunk(<<"LocT">>, <<NumLocals:32>>, Loc),
+
+ %% Create the string table chunk.
+
+ {_,StringTab} = beam_dict:string_table(Dict),
+ StringChunk = chunk(<<"StrT">>, StringTab),
+
+ %% Create the fun table chunk. It is important not to build an empty chunk,
+ %% as that would change the MD5.
+
+ LambdaChunk = case beam_dict:lambda_table(Dict) of
+ {0,[]} -> [];
+ {NumLambdas,LambdaTab} ->
+ chunk(<<"FunT">>, <<NumLambdas:32>>, LambdaTab)
+ end,
+
+ %% Create the attributes and compile info chunks.
+
+ Essentials = [AtomChunk,CodeChunk,StringChunk,ImportChunk,ExpChunk,LambdaChunk],
+ {Attributes,Compile} = build_attributes(Opts, SourceFile, Attr, Essentials),
+ AttrChunk = chunk(<<"Attr">>, Attributes),
+ CompileChunk = chunk(<<"CInf">>, Compile),
+
+ %% Create the abstract code chunk.
+
+ AbstChunk = chunk(<<"Abst">>, Abst),
+
+ %% Create IFF chunk.
+
+ Chunks = case member(slim, Opts) of
+ true -> [Essentials,AttrChunk,CompileChunk,AbstChunk];
+ false -> [Essentials,LocChunk,AttrChunk,CompileChunk,AbstChunk]
+ end,
+ build_form(<<"BEAM">>, Chunks).
+
+%% Build an IFF form.
+
+build_form(Id, Chunks0) when size(Id) == 4, list(Chunks0) ->
+ Chunks = list_to_binary(Chunks0),
+ Size = size(Chunks),
+ 0 = Size rem 4, % Assertion: correct padding?
+ <<"FOR1",(Size+4):32,Id/binary,Chunks/binary>>.
+
+%% Build a correctly padded chunk (with no sub-header).
+
+chunk(Id, Contents) when size(Id) == 4, binary(Contents) ->
+ Size = size(Contents),
+ [<<Id/binary,Size:32>>,Contents|pad(Size)];
+chunk(Id, Contents) when list(Contents) ->
+ chunk(Id, list_to_binary(Contents)).
+
+%% Build a correctly padded chunk (with a sub-header).
+
+chunk(Id, Head, Contents) when size(Id) == 4, is_binary(Head), is_binary(Contents) ->
+ Size = size(Head)+size(Contents),
+ [<<Id/binary,Size:32,Head/binary>>,Contents|pad(Size)];
+chunk(Id, Head, Contents) when list(Contents) ->
+ chunk(Id, Head, list_to_binary(Contents)).
+
+pad(Size) ->
+ case Size rem 4 of
+ 0 -> [];
+ Rem -> duplicate(4 - Rem, 0)
+ end.
+
+flatten_exports(Exps) ->
+ list_to_binary(map(fun({F,A,L}) -> <<F:32,A:32,L:32>> end, Exps)).
+
+flatten_imports(Imps) ->
+ list_to_binary(map(fun({M,F,A}) -> <<M:32,F:32,A:32>> end, Imps)).
+
+build_attributes(Opts, SourceFile, Attr, Essentials) ->
+ Misc = case member(slim, Opts) of
+ false ->
+ {{Y,Mo,D},{H,Mi,S}} = erlang:universaltime(),
+ [{time,{Y,Mo,D,H,Mi,S}},{source,SourceFile}];
+ true -> []
+ end,
+ Compile = [{options,Opts},{version,?COMPILER_VSN}|Misc],
+ {term_to_binary(calc_vsn(Attr, Essentials)),term_to_binary(Compile)}.
+
+%%
+%% If the attributes contain no 'vsn' attribute, we'll insert one
+%% with an MD5 "checksum" calculated on the code as its value.
+%% We'll not change an existing 'vsn' attribute.
+%%
+
+calc_vsn(Attr, Essentials) ->
+ case keymember(vsn, 1, Attr) of
+ true -> Attr;
+ false ->
+ <<Number:128>> = erlang:md5(Essentials),
+ [{vsn,[Number]}|Attr]
+ end.
+
+bif_type('-', 1) -> negate;
+bif_type('+', 2) -> {op, m_plus};
+bif_type('-', 2) -> {op, m_minus};
+bif_type('*', 2) -> {op, m_times};
+bif_type('/', 2) -> {op, m_div};
+bif_type('div', 2) -> {op, int_div};
+bif_type('rem', 2) -> {op, int_rem};
+bif_type('band', 2) -> {op, int_band};
+bif_type('bor', 2) -> {op, int_bor};
+bif_type('bxor', 2) -> {op, int_bxor};
+bif_type('bsl', 2) -> {op, int_bsl};
+bif_type('bsr', 2) -> {op, int_bsr};
+bif_type('bnot', 1) -> {op, int_bnot};
+bif_type(fnegate, 1) -> {op, fnegate};
+bif_type(fadd, 2) -> {op, fadd};
+bif_type(fsub, 2) -> {op, fsub};
+bif_type(fmul, 2) -> {op, fmul};
+bif_type(fdiv, 2) -> {op, fdiv};
+bif_type(_, _) -> bif.
+
+make_op(Comment, Dict) when element(1, Comment) == '%' ->
+ {[],Dict};
+make_op({'%live',_R}, Dict) ->
+ {[],Dict};
+make_op({bif, Bif, nofail, [], Dest}, Dict) ->
+ encode_op(bif0, [{extfunc, erlang, Bif, 0}, Dest], Dict);
+make_op({bif, raise, _Fail, [A1,A2], _Dest}, Dict) ->
+ encode_op(raise, [A1,A2], Dict);
+make_op({bif, Bif, Fail, Args, Dest}, Dict) ->
+ Arity = length(Args),
+ case bif_type(Bif, Arity) of
+ {op, Op} ->
+ make_op(list_to_tuple([Op, Fail|Args++[Dest]]), Dict);
+ negate ->
+ %% Fake negation operator.
+ make_op({m_minus, Fail, {integer,0}, hd(Args), Dest}, Dict);
+ bif ->
+ BifOp = list_to_atom(lists:concat([bif, Arity])),
+ encode_op(BifOp, [Fail, {extfunc, erlang, Bif, Arity}|Args++[Dest]],
+ Dict)
+ end;
+make_op({bs_add=Op,Fail,[Src1,Src2,Unit],Dest}, Dict) ->
+ encode_op(Op, [Fail,Src1,Src2,Unit,Dest], Dict);
+make_op({test,Cond,Fail,Ops}, Dict) when list(Ops) ->
+ encode_op(Cond, [Fail|Ops], Dict);
+make_op({make_fun2,{f,Lbl},Index,OldUniq,NumFree}, Dict0) ->
+ {Fun,Dict} = beam_dict:lambda(Lbl, Index, OldUniq, NumFree, Dict0),
+ make_op({make_fun2,Fun}, Dict);
+make_op(Op, Dict) when atom(Op) ->
+ encode_op(Op, [], Dict);
+make_op({kill,Y}, Dict) ->
+ make_op({init,Y}, Dict);
+make_op({Name,Arg1}, Dict) ->
+ encode_op(Name, [Arg1], Dict);
+make_op({Name,Arg1,Arg2}, Dict) ->
+ encode_op(Name, [Arg1,Arg2], Dict);
+make_op({Name,Arg1,Arg2,Arg3}, Dict) ->
+ encode_op(Name, [Arg1,Arg2,Arg3], Dict);
+make_op({Name,Arg1,Arg2,Arg3,Arg4}, Dict) ->
+ encode_op(Name, [Arg1,Arg2,Arg3,Arg4], Dict);
+make_op({Name,Arg1,Arg2,Arg3,Arg4,Arg5}, Dict) ->
+ encode_op(Name, [Arg1,Arg2,Arg3,Arg4,Arg5], Dict);
+make_op({Name,Arg1,Arg2,Arg3,Arg4,Arg5,Arg6}, Dict) ->
+ encode_op(Name, [Arg1,Arg2,Arg3,Arg4,Arg5,Arg6], Dict).
+
+encode_op(Name, Args, Dict0) when atom(Name) ->
+ {EncArgs,Dict1} = encode_args(Args, Dict0),
+ Op = beam_opcodes:opcode(Name, length(Args)),
+ Dict2 = beam_dict:opcode(Op, Dict1),
+ {list_to_binary([Op|EncArgs]),Dict2}.
+
+encode_args([Arg| T], Dict0) ->
+ {EncArg, Dict1} = encode_arg(Arg, Dict0),
+ {EncTail, Dict2} = encode_args(T, Dict1),
+ {[EncArg| EncTail], Dict2};
+encode_args([], Dict) ->
+ {[], Dict}.
+
+encode_arg({x, X}, Dict) when X >= 0 ->
+ {encode(?tag_x, X), Dict};
+encode_arg({y, Y}, Dict) when Y >= 0 ->
+ {encode(?tag_y, Y), Dict};
+encode_arg({atom, Atom}, Dict0) when atom(Atom) ->
+ {Index, Dict} = beam_dict:atom(Atom, Dict0),
+ {encode(?tag_a, Index), Dict};
+encode_arg({integer, N}, Dict) ->
+ {encode(?tag_i, N), Dict};
+encode_arg(nil, Dict) ->
+ {encode(?tag_a, 0), Dict};
+encode_arg({f, W}, Dict) ->
+ {encode(?tag_f, W), Dict};
+encode_arg({'char', C}, Dict) ->
+ {encode(?tag_h, C), Dict};
+encode_arg({string, String}, Dict0) ->
+ {Offset, Dict} = beam_dict:string(String, Dict0),
+ {encode(?tag_u, Offset), Dict};
+encode_arg({extfunc, M, F, A}, Dict0) ->
+ {Index, Dict} = beam_dict:import(M, F, A, Dict0),
+ {encode(?tag_u, Index), Dict};
+encode_arg({list, List}, Dict0) ->
+ {L, Dict} = encode_list(List, Dict0, []),
+ {[encode(?tag_z, 1), encode(?tag_u, length(List))|L], Dict};
+encode_arg({float, Float}, Dict) when float(Float) ->
+ {[encode(?tag_z, 0)|<<Float:64/float>>], Dict};
+encode_arg({fr,Fr}, Dict) ->
+ {[encode(?tag_z, 2),encode(?tag_u,Fr)], Dict};
+encode_arg({field_flags,Flags0}, Dict) ->
+ Flags = lists:foldl(fun (F, S) -> S bor flag_to_bit(F) end, 0, Flags0),
+ {encode(?tag_u, Flags), Dict};
+encode_arg({alloc,List}, Dict) ->
+ {encode_alloc_list(List),Dict};
+encode_arg(Int, Dict) when is_integer(Int) ->
+ {encode(?tag_u, Int),Dict}.
+
+flag_to_bit(aligned) -> 16#01;
+flag_to_bit(little) -> 16#02;
+flag_to_bit(big) -> 16#00;
+flag_to_bit(signed) -> 16#04;
+flag_to_bit(unsigned)-> 16#00;
+flag_to_bit(exact) -> 16#08;
+flag_to_bit(native) -> 16#10.
+
+encode_list([H|T], _Dict, _Acc) when is_list(H) ->
+ exit({illegal_nested_list,encode_arg,[H|T]});
+encode_list([H|T], Dict0, Acc) ->
+ {Enc,Dict} = encode_arg(H, Dict0),
+ encode_list(T, Dict, [Enc|Acc]);
+encode_list([], Dict, Acc) ->
+ {lists:reverse(Acc), Dict}.
+
+encode_alloc_list(L0) ->
+ L = encode_alloc_list_1(L0),
+ [encode(?tag_z, 3),encode(?tag_u, length(L0))|L].
+
+encode_alloc_list_1([{words,Words}|T]) ->
+ [encode(?tag_u, 0),encode(?tag_u, Words)|encode_alloc_list_1(T)];
+encode_alloc_list_1([{floats,Floats}|T]) ->
+ [encode(?tag_u, 1),encode(?tag_u, Floats)|encode_alloc_list_1(T)];
+encode_alloc_list_1([]) -> [].
+
+encode(Tag, N) when N < 0 ->
+ encode1(Tag, negative_to_bytes(N, []));
+encode(Tag, N) when N < 16 ->
+ (N bsl 4) bor Tag;
+encode(Tag, N) when N < 16#800 ->
+ [((N bsr 3) band 2#11100000) bor Tag bor 2#00001000, N band 16#ff];
+encode(Tag, N) ->
+ encode1(Tag, to_bytes(N, [])).
+
+encode1(Tag, Bytes) ->
+ case length(Bytes) of
+ Num when 2 =< Num, Num =< 8 ->
+ [((Num-2) bsl 5) bor 2#00011000 bor Tag| Bytes];
+ Num when 8 < Num ->
+ [2#11111000 bor Tag, encode(?tag_u, Num-9)| Bytes]
+ end.
+
+to_bytes(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+to_bytes(N, Acc) ->
+ to_bytes(N bsr 8, [N band 16#ff| Acc]).
+
+negative_to_bytes(-1, [B1, B2|T]) when B1 > 127 ->
+ [B1, B2|T];
+negative_to_bytes(N, Acc) ->
+ negative_to_bytes(N bsr 8, [N band 16#ff|Acc]).
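
The compact argument encoding implemented by encode/2 and encode1/2 above packs the value and its tag into as few bytes as possible. A worked example, using a tag value of 0 purely for illustration (the real tag constants come from beam_opcodes.hrl, which is not shown here):

    %% encode(0, 5):   5 < 16, so one byte:
    %%   (5 bsl 4) bor 0                                     = 2#01010000 = 80
    %% encode(0, 300): 16 =< 300 < 16#800, so two bytes:
    %%   ((300 bsr 3) band 2#11100000) bor 0 bor 2#00001000  = 2#00101000 = 40
    %%   300 band 16#ff                                       = 44
    %% The top three bits of the first byte carry the high bits of the
    %% value: (2#001 bsl 8) bor 44 = 300.
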
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_block.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_block.erl
new file mode 100644
index 0000000000..b0dd3e6380
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_block.erl
@@ -0,0 +1,601 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_block.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Purpose : Partitions assembly instructions into basic blocks and
+%% optimizes them.
+
+-module(beam_block).
+
+-export([module/2]).
+-export([live_at_entry/1]). %Used by beam_type, beam_bool.
+-export([is_killed/2]). %Used by beam_dead, beam_type, beam_bool.
+-export([is_not_used/2]). %Used by beam_bool.
+-export([merge_blocks/2]). %Used by beam_jump.
+-import(lists, [map/2,mapfoldr/3,reverse/1,reverse/2,foldl/3,
+ member/2,sort/1,all/2]).
+-define(MAXREG, 1024).
+
+module({Mod,Exp,Attr,Fs,Lc}, _Opt) ->
+ {ok,{Mod,Exp,Attr,map(fun function/1, Fs),Lc}}.
+
+function({function,Name,Arity,CLabel,Is0}) ->
+ %% Collect basic blocks and optimize them.
+ Is = blockify(Is0),
+
+ %% Done.
+ {function,Name,Arity,CLabel,Is}.
+
+%% blockify(Instructions0) -> Instructions
+%% Collect sequences of instructions into basic blocks and
+%% optimize the contents of the blocks. Also do some simple
+%% optimizations on instructions outside the blocks.
+
+blockify(Is) ->
+ blockify(Is, []).
+
+blockify([{loop_rec,{f,Fail},{x,0}},{loop_rec_end,_Lbl},{label,Fail}|Is], Acc) ->
+ %% Useless instruction sequence.
+ blockify(Is, Acc);
+blockify([{test,bs_test_tail,F,[Bits]}|Is],
+ [{test,bs_skip_bits,F,[{integer,I},Unit,_Flags]}|Acc]) ->
+ blockify(Is, [{test,bs_test_tail,F,[Bits+I*Unit]}|Acc]);
+blockify([{test,bs_skip_bits,F,[{integer,I1},Unit1,_]}|Is],
+ [{test,bs_skip_bits,F,[{integer,I2},Unit2,Flags]}|Acc]) ->
+ blockify(Is, [{test,bs_skip_bits,F,
+ [{integer,I1*Unit1+I2*Unit2},1,Flags]}|Acc]);
+blockify([{test,is_atom,{f,Fail},[Reg]}=I|
+ [{select_val,Reg,{f,Fail},
+ {list,[{atom,false},{f,_}=BrFalse,
+ {atom,true}=AtomTrue,{f,_}=BrTrue]}}|Is]=Is0],
+ [{block,Bl}|_]=Acc) ->
+ case is_last_bool(Bl, Reg) of
+ false ->
+ blockify(Is0, [I|Acc]);
+ true ->
+ blockify(Is, [{jump,BrTrue},
+ {test,is_eq_exact,BrFalse,[Reg,AtomTrue]}|Acc])
+ end;
+blockify([{test,is_atom,{f,Fail},[Reg]}=I|
+ [{select_val,Reg,{f,Fail},
+ {list,[{atom,true}=AtomTrue,{f,_}=BrTrue,
+ {atom,false},{f,_}=BrFalse]}}|Is]=Is0],
+ [{block,Bl}|_]=Acc) ->
+ case is_last_bool(Bl, Reg) of
+ false ->
+ blockify(Is0, [I|Acc]);
+ true ->
+ blockify(Is, [{jump,BrTrue},
+ {test,is_eq_exact,BrFalse,[Reg,AtomTrue]}|Acc])
+ end;
+blockify([I|Is0]=IsAll, Acc) ->
+ case is_bs_put(I) of
+ true ->
+ {BsPuts0,Is} = collect_bs_puts(IsAll),
+ BsPuts = opt_bs_puts(BsPuts0),
+ blockify(Is, reverse(BsPuts, Acc));
+ false ->
+ case collect(I) of
+ error -> blockify(Is0, [I|Acc]);
+ Instr when is_tuple(Instr) ->
+ {Block0,Is} = collect_block(IsAll),
+ Block = opt_block(Block0),
+ blockify(Is, [{block,Block}|Acc])
+ end
+ end;
+blockify([], Acc) -> reverse(Acc).
+
+is_last_bool([I,{'%live',_}], Reg) ->
+ is_last_bool([I], Reg);
+is_last_bool([{set,[Reg],As,{bif,N,_}}], Reg) ->
+ Ar = length(As),
+ erl_internal:new_type_test(N, Ar) orelse erl_internal:comp_op(N, Ar)
+ orelse erl_internal:bool_op(N, Ar);
+is_last_bool([_|Is], Reg) -> is_last_bool(Is, Reg);
+is_last_bool([], _) -> false.
+
+collect_block(Is) ->
+ collect_block(Is, []).
+
+collect_block([{allocate_zero,Ns,R},{test_heap,Nh,R}|Is], Acc) ->
+ collect_block(Is, [{allocate,R,{no_opt,Ns,Nh,[]}}|Acc]);
+collect_block([I|Is]=Is0, Acc) ->
+ case collect(I) of
+ error -> {reverse(Acc),Is0};
+ Instr -> collect_block(Is, [Instr|Acc])
+ end;
+collect_block([], Acc) -> {reverse(Acc),[]}.
+
+collect({allocate_zero,N,R}) -> {allocate,R,{zero,N,0,[]}};
+collect({test_heap,N,R}) -> {allocate,R,{nozero,nostack,N,[]}};
+collect({bif,N,nofail,As,D}) -> {set,[D],As,{bif,N}};
+collect({bif,N,F,As,D}) -> {set,[D],As,{bif,N,F}};
+collect({move,S,D}) -> {set,[D],[S],move};
+collect({put_list,S1,S2,D}) -> {set,[D],[S1,S2],put_list};
+collect({put_tuple,A,D}) -> {set,[D],[],{put_tuple,A}};
+collect({put,S}) -> {set,[],[S],put};
+collect({put_string,L,S,D}) -> {set,[D],[],{put_string,L,S}};
+collect({get_tuple_element,S,I,D}) -> {set,[D],[S],{get_tuple_element,I}};
+collect({set_tuple_element,S,D,I}) -> {set,[],[S,D],{set_tuple_element,I}};
+collect({get_list,S,D1,D2}) -> {set,[D1,D2],[S],get_list};
+collect(remove_message) -> {set,[],[],remove_message};
+collect({'catch',R,L}) -> {set,[R],[],{'catch',L}};
+collect({'%live',_}=Live) -> Live;
+collect(_) -> error.
+
+opt_block(Is0) ->
+ %% We explicitly move any allocate instruction upwards before optimising
+ %% moves, to avoid any potential problems with the calculation of live
+ %% registers.
+ Is1 = find_fixpoint(fun move_allocates/1, Is0),
+ Is2 = find_fixpoint(fun opt/1, Is1),
+ Is = opt_alloc(Is2),
+ share_floats(Is).
+
+find_fixpoint(OptFun, Is0) ->
+ case OptFun(Is0) of
+ Is0 -> Is0;
+ Is1 -> find_fixpoint(OptFun, Is1)
+ end.
+
+move_allocates([{set,_Ds,_Ss,{set_tuple_element,_}}|_]=Is) -> Is;
+move_allocates([{set,Ds,Ss,_Op}=Set,{allocate,R,Alloc}|Is]) when is_integer(R) ->
+ [{allocate,live_regs(Ds, Ss, R),Alloc},Set|Is];
+move_allocates([{allocate,R1,Alloc1},{allocate,R2,Alloc2}|Is]) ->
+ R1 = R2, % Assertion.
+ move_allocates([{allocate,R1,combine_alloc(Alloc1, Alloc2)}|Is]);
+move_allocates([I|Is]) ->
+ [I|move_allocates(Is)];
+move_allocates([]) -> [].
+
+combine_alloc({_,Ns,Nh1,Init}, {_,nostack,Nh2,[]}) ->
+ {zero,Ns,Nh1+Nh2,Init}.
+
+merge_blocks([{allocate,R,{Attr,Ns,Nh1,Init}}|B1],
+ [{allocate,_,{_,nostack,Nh2,[]}}|B2]) ->
+ Alloc = {allocate,R,{Attr,Ns,Nh1+Nh2,Init}},
+ [Alloc|merge_blocks(B1, B2)];
+merge_blocks(B1, B2) -> merge_blocks_1(B1++[{set,[],[],stop_here}|B2]).
+
+merge_blocks_1([{set,[],_,stop_here}|Is]) -> Is;
+merge_blocks_1([{set,[D],_,move}=I|Is]) ->
+ case is_killed(D, Is) of
+ true -> merge_blocks_1(Is);
+ false -> [I|merge_blocks_1(Is)]
+ end;
+merge_blocks_1([I|Is]) -> [I|merge_blocks_1(Is)].
+
+opt([{set,[Dst],As,{bif,Bif,Fail}}=I1,
+ {set,[Dst],[Dst],{bif,'not',Fail}}=I2|Is]) ->
+ %% Get rid of the 'not' if the operation can be inverted.
+ case inverse_comp_op(Bif) of
+ none -> [I1,I2|opt(Is)];
+ RevBif -> [{set,[Dst],As,{bif,RevBif,Fail}}|opt(Is)]
+ end;
+opt([{set,[X],[X],move}|Is]) -> opt(Is);
+opt([{set,[D1],[{integer,Idx1},Reg],{bif,element,{f,0}}}=I1,
+ {set,[D2],[{integer,Idx2},Reg],{bif,element,{f,0}}}=I2|Is])
+ when Idx1 < Idx2, D1 =/= D2, D1 =/= Reg, D2 =/= Reg ->
+ opt([I2,I1|Is]);
+opt([{set,Ds0,Ss,Op}|Is0]) ->
+ {Ds,Is} = opt_moves(Ds0, Is0),
+ [{set,Ds,Ss,Op}|opt(Is)];
+opt([I|Is]) -> [I|opt(Is)];
+opt([]) -> [].
+
+opt_moves([], Is0) -> {[],Is0};
+opt_moves([D0], Is0) ->
+ {D1,Is1} = opt_move(D0, Is0),
+ {[D1],Is1};
+opt_moves([X0,Y0]=Ds, Is0) ->
+ {X1,Is1} = opt_move(X0, Is0),
+ case opt_move(Y0, Is1) of
+ {Y1,Is2} when X1 =/= Y1 -> {[X1,Y1],Is2};
+ _Other when X1 =/= Y0 -> {[X1,Y0],Is1};
+ _Other -> {Ds,Is0}
+ end.
+
+opt_move(R, [{set,[D],[R],move}|Is]=Is0) ->
+ case is_killed(R, Is) of
+ true -> {D,Is};
+ false -> {R,Is0}
+ end;
+opt_move(R, [I|Is0]) ->
+ case is_transparent(R, I) of
+ true ->
+ {D,Is1} = opt_move(R, Is0),
+ case is_transparent(D, I) of
+ true -> {D,[I|Is1]};
+ false -> {R,[I|Is0]}
+ end;
+ false -> {R,[I|Is0]}
+ end;
+opt_move(R, []) -> {R,[]}.
+
+is_transparent(R, {set,Ds,Ss,_Op}) ->
+ case member(R, Ds) of
+ true -> false;
+ false -> not member(R, Ss)
+ end;
+is_transparent(_, _) -> false.
+
+%% is_killed(Register, [Instruction]) -> true|false
+%% Determine whether a register is killed by the instruction sequence.
+%% If true is returned, it means that the register will not be
+%% referenced in ANY way (not even indirectly by an allocate instruction);
+%% i.e. it is OK to enter the instruction sequence with Register
+%% containing garbage.
+
+is_killed({x,N}=R, [{block,Blk}|Is]) ->
+ case is_killed(R, Blk) of
+ true -> true;
+ false ->
+ %% Before looking beyond the block, we must be
+ %% sure that the register is not referenced by
+ %% any allocate instruction in the block.
+ case all(fun({allocate,Live,_}) when N < Live -> false;
+ (_) -> true
+ end, Blk) of
+ true -> is_killed(R, Is);
+ false -> false
+ end
+ end;
+is_killed(R, [{block,Blk}|Is]) ->
+ case is_killed(R, Blk) of
+ true -> true;
+ false -> is_killed(R, Is)
+ end;
+is_killed(R, [{set,Ds,Ss,_Op}|Is]) ->
+ case member(R, Ss) of
+ true -> false;
+ false ->
+ case member(R, Ds) of
+ true -> true;
+ false -> is_killed(R, Is)
+ end
+ end;
+is_killed(R, [{case_end,Used}|_]) -> R =/= Used;
+is_killed(R, [{badmatch,Used}|_]) -> R =/= Used;
+is_killed(_, [if_end|_]) -> true;
+is_killed(R, [{func_info,_,_,Ar}|_]) ->
+ case R of
+ {x,X} when X < Ar -> false;
+ _ -> true
+ end;
+is_killed(R, [{kill,R}|_]) -> true;
+is_killed(R, [{kill,_}|Is]) -> is_killed(R, Is);
+is_killed(R, [{bs_init2,_,_,_,_,_,Dst}|Is]) ->
+ if
+ R =:= Dst -> true;
+ true -> is_killed(R, Is)
+ end;
+is_killed(R, [{bs_put_string,_,_}|Is]) -> is_killed(R, Is);
+is_killed({x,R}, [{'%live',Live}|_]) when R >= Live -> true;
+is_killed({x,R}, [{'%live',_}|Is]) -> is_killed(R, Is);
+is_killed({x,R}, [{allocate,Live,_}|_]) ->
+ %% Note: To be safe here, we must return either true or false,
+ %% not looking further at the instructions beyond the allocate
+ %% instruction.
+ R >= Live;
+is_killed({x,R}, [{call,Live,_}|_]) when R >= Live -> true;
+is_killed({x,R}, [{call_last,Live,_,_}|_]) when R >= Live -> true;
+is_killed({x,R}, [{call_only,Live,_}|_]) when R >= Live -> true;
+is_killed({x,R}, [{call_ext,Live,_}|_]) when R >= Live -> true;
+is_killed({x,R}, [{call_ext_last,Live,_,_}|_]) when R >= Live -> true;
+is_killed({x,R}, [{call_ext_only,Live,_}|_]) when R >= Live -> true;
+is_killed({x,R}, [return|_]) when R > 0 -> true;
+is_killed(_, _) -> false.
+
+%% is_not_used(Register, [Instruction]) -> true|false
+%% Determine whether a register is unused by the instruction sequence.
+%% If true is returned, it means that the register will not be
+%% referenced directly, but it may be referenced by an allocate
+%% instruction (meaning that it is NOT allowed to contain garbage).
+
+is_not_used(R, [{block,Blk}|Is]) ->
+ case is_not_used(R, Blk) of
+ true -> true;
+ false -> is_not_used(R, Is)
+ end;
+is_not_used({x,R}=Reg, [{allocate,Live,_}|Is]) ->
+ if
+ R >= Live -> true;
+ true -> is_not_used(Reg, Is)
+ end;
+is_not_used(R, [{set,Ds,Ss,_Op}|Is]) ->
+ case member(R, Ss) of
+ true -> false;
+ false ->
+ case member(R, Ds) of
+ true -> true;
+ false -> is_not_used(R, Is)
+ end
+ end;
+is_not_used(R, Is) -> is_killed(R, Is).
+
+%% opt_alloc(Instructions) -> Instructions'
+%% Optimises all allocate instructions.
+
+opt_alloc([{allocate,R,{_,Ns,Nh,[]}}|Is]) ->
+ [opt_alloc(Is, Ns, Nh, R)|opt(Is)];
+opt_alloc([I|Is]) -> [I|opt_alloc(Is)];
+opt_alloc([]) -> [].
+
+%% opt_alloc(Instructions, FrameSize, HeapNeed, LivingRegs) -> [Instr]
+%% Generates the optimal sequence of instructions for
+%% allocating and initializing the stack frame and needed heap.
+
+opt_alloc(_Is, nostack, Nh, LivingRegs) ->
+ {allocate,LivingRegs,{nozero,nostack,Nh,[]}};
+opt_alloc(Is, Ns, Nh, LivingRegs) ->
+ InitRegs = init_yreg(Is, 0),
+ case count_ones(InitRegs) of
+ N when N*2 > Ns ->
+ {allocate,LivingRegs,{nozero,Ns,Nh,gen_init(Ns, InitRegs)}};
+ _ ->
+ {allocate,LivingRegs,{zero,Ns,Nh,[]}}
+ end.
+
+gen_init(Fs, Regs) -> gen_init(Fs, Regs, 0, []).
+
+gen_init(SameFs, _Regs, SameFs, Acc) -> reverse(Acc);
+gen_init(Fs, Regs, Y, Acc) when Regs band 1 == 0 ->
+ gen_init(Fs, Regs bsr 1, Y+1, [{init, {y,Y}}|Acc]);
+gen_init(Fs, Regs, Y, Acc) ->
+ gen_init(Fs, Regs bsr 1, Y+1, Acc).
+
+%% init_yreg(Instructions, RegSet) -> RegSetInitialized
+%% Calculate the set of initialized y registers.
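+%%
+%%  The set is represented as a bitmask; for example (sketch):
+%%      init_yreg([{set,[{y,0}],[{x,0}],move},
+%%                 {set,[{y,2}],[{x,1}],move}], 0) -> 2#101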
+
+init_yreg([{set,_,_,{bif,_,_}}|_], Reg) -> Reg;
+init_yreg([{set,Ds,_,_}|Is], Reg) -> init_yreg(Is, add_yregs(Ds, Reg));
+init_yreg(_Is, Reg) -> Reg.
+
+add_yregs(Ys, Reg) -> foldl(fun(Y, R0) -> add_yreg(Y, R0) end, Reg, Ys).
+
+add_yreg({y,Y}, Reg) -> Reg bor (1 bsl Y);
+add_yreg(_, Reg) -> Reg.
+
+count_ones(Bits) -> count_ones(Bits, 0).
+count_ones(0, Acc) -> Acc;
+count_ones(Bits, Acc) ->
+ count_ones(Bits bsr 1, Acc + (Bits band 1)).
+
+%% live_at_entry(Is) -> NumberOfRegisters
+%%  Calculate the number of registers live at the entry to the code
+%% sequence.
+
+live_at_entry([{block,[{allocate,R,_}|_]}|_]) ->
+ R;
+live_at_entry([{label,_}|Is]) ->
+ live_at_entry(Is);
+live_at_entry([{block,Bl}|_]) ->
+ live_at_entry(Bl);
+live_at_entry([{func_info,_,_,Ar}|_]) ->
+ Ar;
+live_at_entry(Is0) ->
+ case reverse(Is0) of
+ [{'%live',Regs}|Is] -> live_at_entry_1(Is, (1 bsl Regs)-1);
+ _ -> unknown
+ end.
+
+live_at_entry_1([{set,Ds,Ss,_}|Is], Rset0) ->
+ Rset = x_live(Ss, x_dead(Ds, Rset0)),
+ live_at_entry_1(Is, Rset);
+live_at_entry_1([{allocate,_,_}|Is], Rset) ->
+ live_at_entry_1(Is, Rset);
+live_at_entry_1([], Rset) -> live_regs_1(0, Rset).
+
+%% Calculate the new number of live registers when we move an allocate
+%% instruction upwards, passing a 'set' instruction.
+
+live_regs(Ds, Ss, Regs0) ->
+ Rset = x_live(Ss, x_dead(Ds, (1 bsl Regs0)-1)),
+ live_regs_1(0, Rset).
+
+live_regs_1(N, 0) -> N;
+live_regs_1(N, Regs) -> live_regs_1(N+1, Regs bsr 1).
+
+x_dead([{x,N}|Rs], Regs) -> x_dead(Rs, Regs band (bnot (1 bsl N)));
+x_dead([_|Rs], Regs) -> x_dead(Rs, Regs);
+x_dead([], Regs) -> Regs.
+
+x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N));
+x_live([_|Rs], Regs) -> x_live(Rs, Regs);
+x_live([], Regs) -> Regs.
+
+%%
+%% If a floating point literal occurs more than once, move it into
+%% a free register and re-use it.
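+%%
+%% Sketch (hypothetical block): if {float,2.5} occurs as a source in two
+%% different 'set' instructions, a single
+%%     {set,[{x,K}],[{float,2.5}],move}
+%% is prepended, K being one past the highest x register used by the
+%% block, and both uses are rewritten to read {x,K} instead of the literal.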
+%%
+
+share_floats([{allocate,_,_}=Alloc|Is]) ->
+ [Alloc|share_floats(Is)];
+share_floats(Is0) ->
+ All = get_floats(Is0, []),
+ MoreThanOnce0 = more_than_once(sort(All), gb_sets:empty()),
+ case gb_sets:is_empty(MoreThanOnce0) of
+ true -> Is0;
+ false ->
+ MoreThanOnce = gb_sets:to_list(MoreThanOnce0),
+ FreeX = highest_used(Is0, -1) + 1,
+ Regs0 = make_reg_map(MoreThanOnce, FreeX, []),
+ Regs = gb_trees:from_orddict(Regs0),
+ Is = map(fun({set,Ds,[{float,F}],Op}=I) ->
+ case gb_trees:lookup(F, Regs) of
+ none -> I;
+ {value,R} -> {set,Ds,[R],Op}
+ end;
+ (I) -> I
+ end, Is0),
+ [{set,[R],[{float,F}],move} || {F,R} <- Regs0] ++ Is
+ end.
+
+get_floats([{set,_,[{float,F}],_}|Is], Acc) ->
+ get_floats(Is, [F|Acc]);
+get_floats([_|Is], Acc) ->
+ get_floats(Is, Acc);
+get_floats([], Acc) -> Acc.
+
+more_than_once([F,F|Fs], Set) ->
+ more_than_once(Fs, gb_sets:add(F, Set));
+more_than_once([_|Fs], Set) ->
+ more_than_once(Fs, Set);
+more_than_once([], Set) -> Set.
+
+highest_used([{set,Ds,Ss,_}|Is], High) ->
+ highest_used(Is, highest(Ds, highest(Ss, High)));
+highest_used([{'%live',Live}|Is], High) when Live > High ->
+ highest_used(Is, Live);
+highest_used([_|Is], High) ->
+ highest_used(Is, High);
+highest_used([], High) -> High.
+
+highest([{x,R}|Rs], High) when R > High ->
+ highest(Rs, R);
+highest([_|Rs], High) ->
+ highest(Rs, High);
+highest([], High) -> High.
+
+make_reg_map([F|Fs], R, Acc) when R < ?MAXREG ->
+ make_reg_map(Fs, R+1, [{F,{x,R}}|Acc]);
+make_reg_map(_, _, Acc) -> sort(Acc).
+
+%% inverse_comp_op(Op) -> none|RevOp
+
+inverse_comp_op('=:=') -> '=/=';
+inverse_comp_op('=/=') -> '=:=';
+inverse_comp_op('==') -> '/=';
+inverse_comp_op('/=') -> '==';
+inverse_comp_op('>') -> '=<';
+inverse_comp_op('<') -> '>=';
+inverse_comp_op('>=') -> '<';
+inverse_comp_op('=<') -> '>';
+inverse_comp_op(_) -> none.
+
+%%%
+%%% Evaluation of constant bit fields.
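+%%% For instance (illustrative; flags and failure labels elided), a run
+%%% of byte-sized constant puts such as
+%%%     {bs_put_integer,Fail,{integer,8},1,Flags,{integer,$a}}
+%%%     {bs_put_integer,Fail,{integer,8},1,Flags,{integer,$b}}
+%%% is collapsed into a single {bs_put_string,2,{string,"ab"}}.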
+%%%
+
+is_bs_put({bs_put_integer,_,_,_,_,_}) -> true;
+is_bs_put({bs_put_float,_,_,_,_,_}) -> true;
+is_bs_put(_) -> false.
+
+collect_bs_puts(Is) ->
+ collect_bs_puts_1(Is, []).
+
+collect_bs_puts_1([I|Is]=Is0, Acc) ->
+ case is_bs_put(I) of
+ false -> {reverse(Acc),Is0};
+ true -> collect_bs_puts_1(Is, [I|Acc])
+ end;
+collect_bs_puts_1([], Acc) -> {reverse(Acc),[]}.
+
+opt_bs_puts(Is) ->
+ opt_bs_1(Is, []).
+
+opt_bs_1([{bs_put_float,Fail,{integer,Sz},1,Flags0,Src}=I0|Is], Acc) ->
+ case catch eval_put_float(Src, Sz, Flags0) of
+ {'EXIT',_} ->
+ opt_bs_1(Is, [I0|Acc]);
+ <<Int:Sz>> ->
+ Flags = force_big(Flags0),
+ I = {bs_put_integer,Fail,{integer,Sz},1,Flags,{integer,Int}},
+ opt_bs_1([I|Is], Acc)
+ end;
+opt_bs_1([{bs_put_integer,_,{integer,8},1,_,{integer,_}}|_]=IsAll, Acc0) ->
+ {Is,Acc} = bs_collect_string(IsAll, Acc0),
+ opt_bs_1(Is, Acc);
+opt_bs_1([{bs_put_integer,Fail,{integer,Sz},1,F,{integer,N}}=I|Is0], Acc) when Sz > 8 ->
+ case field_endian(F) of
+ big ->
+ case bs_split_int(N, Sz, Fail, Is0) of
+ no_split -> opt_bs_1(Is0, [I|Acc]);
+ Is -> opt_bs_1(Is, Acc)
+ end;
+ little ->
+ case catch <<N:Sz/little>> of
+ {'EXIT',_} ->
+ opt_bs_1(Is0, [I|Acc]);
+ <<Int:Sz>> ->
+ Flags = force_big(F),
+ Is = [{bs_put_integer,Fail,{integer,Sz},1,
+ Flags,{integer,Int}}|Is0],
+ opt_bs_1(Is, Acc)
+ end;
+ native -> opt_bs_1(Is0, [I|Acc])
+ end;
+opt_bs_1([{Op,Fail,{integer,Sz},U,F,Src}|Is], Acc) when U > 1 ->
+ opt_bs_1([{Op,Fail,{integer,U*Sz},1,F,Src}|Is], Acc);
+opt_bs_1([I|Is], Acc) ->
+ opt_bs_1(Is, [I|Acc]);
+opt_bs_1([], Acc) -> reverse(Acc).
+
+eval_put_float(Src, Sz, Flags) ->
+ Val = value(Src),
+ case field_endian(Flags) of
+ little -> <<Val:Sz/little-float-unit:1>>;
+ big -> <<Val:Sz/big-float-unit:1>>
+ %% native intentionally not handled here - we can't optimize it.
+ end.
+
+value({integer,I}) -> I;
+value({float,F}) -> F;
+value({atom,A}) -> A.
+
+bs_collect_string(Is, [{bs_put_string,Len,{string,Str}}|Acc]) ->
+ bs_coll_str_1(Is, Len, reverse(Str), Acc);
+bs_collect_string(Is, Acc) ->
+ bs_coll_str_1(Is, 0, [], Acc).
+
+bs_coll_str_1([{bs_put_integer,_,{integer,Sz},U,_,{integer,V}}|Is],
+ Len, StrAcc, IsAcc) when U*Sz =:= 8 ->
+ Byte = V band 16#FF,
+ bs_coll_str_1(Is, Len+1, [Byte|StrAcc], IsAcc);
+bs_coll_str_1(Is, Len, StrAcc, IsAcc) ->
+ {Is,[{bs_put_string,Len,{string,reverse(StrAcc)}}|IsAcc]}.
+
+field_endian({field_flags,F}) -> field_endian_1(F).
+
+field_endian_1([big=E|_]) -> E;
+field_endian_1([little=E|_]) -> E;
+field_endian_1([native=E|_]) -> E;
+field_endian_1([_|Fs]) -> field_endian_1(Fs).
+
+force_big({field_flags,F}) ->
+ {field_flags,force_big_1(F)}.
+
+force_big_1([big|_]=Fs) -> Fs;
+force_big_1([little|Fs]) -> [big|Fs];
+force_big_1([F|Fs]) -> [F|force_big_1(Fs)].
+
+bs_split_int(0, Sz, _, _) when Sz > 64 ->
+ %% We don't want to split in this case because the
+ %% string will consist of only zeroes.
+ no_split;
+bs_split_int(N, Sz, Fail, Acc) ->
+ FirstByteSz = case Sz rem 8 of
+ 0 -> 8;
+ Rem -> Rem
+ end,
+ bs_split_int_1(N, FirstByteSz, Sz, Fail, Acc).
+
+bs_split_int_1(N, ByteSz, Sz, Fail, Acc) when Sz > 0 ->
+ Mask = (1 bsl ByteSz) - 1,
+ I = {bs_put_integer,Fail,{integer,ByteSz},1,
+ {field_flags,[big]},{integer,N band Mask}},
+ bs_split_int_1(N bsr ByteSz, 8, Sz-ByteSz, Fail, [I|Acc]);
+bs_split_int_1(_, _, _, _, Acc) -> Acc.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_bool.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_bool.erl
new file mode 100644
index 0000000000..3180a22433
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_bool.erl
@@ -0,0 +1,617 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_bool.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Purpose: Optimizes booleans in guards.
+
+-module(beam_bool).
+
+-export([module/2]).
+
+-import(lists, [reverse/1,foldl/3,mapfoldl/3,sort/1,member/2]).
+-define(MAXREG, 1024).
+
+-record(st,
+ {next, %Next label number.
+ ll %Live regs at labels.
+ }).
+
+module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
+ %%io:format("~p:\n", [Mod]),
+ {Fs,_} = mapfoldl(fun(Fn, Lbl) -> function(Fn, Lbl) end, 100000000, Fs0),
+ {ok,{Mod,Exp,Attr,Fs,Lc}}.
+
+function({function,Name,Arity,CLabel,Is0}, Lbl0) ->
+ %%io:format("~p/~p:\n", [Name,Arity]),
+ {Is,#st{next=Lbl}} = bool_opt(Is0, Lbl0),
+ {{function,Name,Arity,CLabel,Is},Lbl}.
+
+%%
+%% Optimize boolean expressions that use guard bifs. Rewrite to
+%% use test instructions if possible.
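+%%
+%% For example (a sketch, not output from any particular module): a guard
+%% such as 'is_atom(X) and is_list(Y)', compiled as two guard-BIF calls
+%% followed by {test,is_eq_exact,{f,Fail},[Bool,{atom,true}]}, can be
+%% rewritten into
+%%     {test,is_atom,{f,Fail},[X]}
+%%     {test,is_list,{f,Fail},[Y]}
+%% with no intermediate boolean register.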
+%%
+
+bool_opt(Asm, Lbl) ->
+ LiveInfo = index_instructions(Asm),
+ bopt(Asm, [], #st{next=Lbl,ll=LiveInfo}).
+
+bopt([{block,Bl0}=Block|
+ [{jump,{f,Succ}},
+ {label,Fail},
+ {block,[{set,[Dst],[{atom,false}],move},{'%live',Live}]},
+ {label,Succ}|Is]=Is0], Acc0, St) ->
+ case split_block(Bl0, Dst, Fail) of
+ failed ->
+ bopt(Is0, [Block|Acc0], St);
+ {Bl,PreBlock} ->
+ Acc1 = case PreBlock of
+ [] -> Acc0;
+ _ -> [{block,PreBlock}|Acc0]
+ end,
+ Acc = [{protected,[Dst],Bl,{Fail,Succ,Live}}|Acc1],
+ bopt(Is, Acc, St)
+ end;
+bopt([{test,is_eq_exact,{f,Fail},[Reg,{atom,true}]}=I|Is], [{block,_}|_]=Acc0, St0) ->
+ case bopt_block(Reg, Fail, Is, Acc0, St0) of
+ failed -> bopt(Is, [I|Acc0], St0);
+ {Acc,St} -> bopt(Is, Acc, St)
+ end;
+bopt([I|Is], Acc, St) ->
+ bopt(Is, [I|Acc], St);
+bopt([], Acc, St) ->
+ {bopt_reverse(Acc, []),St}.
+
+bopt_reverse([{protected,[Dst],Block,{Fail,Succ,Live}}|Is], Acc0) ->
+ Acc = [{block,Block},{jump,{f,Succ}},
+ {label,Fail},
+ {block,[{set,[Dst],[{atom,false}],move},{'%live',Live}]},
+ {label,Succ}|Acc0],
+ bopt_reverse(Is, Acc);
+bopt_reverse([I|Is], Acc) ->
+ bopt_reverse(Is, [I|Acc]);
+bopt_reverse([], Acc) -> Acc.
+
+%% bopt_block(Reg, Fail, OldIs, Accumulator, St) -> failed | {NewAcc,St}
+%%  Attempt to optimize a block of guard BIFs followed by a test
+%% instruction.
+bopt_block(Reg, Fail, OldIs, [{block,Bl0}|Acc0], St0) ->
+ case split_block(Bl0, Reg, Fail) of
+ failed ->
+ %% Reason for failure: The block either contained no
+ %% guard BIFs with the failure label Fail, or the final
+ %% instruction in the block did not assign the Reg register.
+
+ %%io:format("split ~p: ~P\n", [Reg,Bl0,20]),
+ failed;
+ {Bl1,BlPre} ->
+	    %% The block has been split. Bl1 is a non-empty list
+ %% of guard BIF instructions having the failure label Fail.
+	    %% BlPre is a (possibly empty) list of instructions preceding
+ %% Bl1.
+ Acc1 = make_block(BlPre, Acc0),
+ {Bl,Acc} = extend_block(Bl1, Fail, Acc1),
+ case catch bopt_block_1(Bl, Fail, St0) of
+ {'EXIT',_Reason} ->
+ %% Optimization failed for one of the following reasons:
+ %%
+ %% 1. Not possible to rewrite because a boolean value is
+ %% passed to another guard bif, e.g. 'abs(A > B)'
+ %% (in this case, obviously nonsense code). Rare in
+ %% practice.
+ %%
+ %% 2. Not possible to rewrite because we have not seen
+		    %%    the complete boolean expression (it is spread out
+ %% over several blocks with jumps and labels).
+		    %%    The 'or' and 'and' instructions need fully known
+		    %%    operands in order to be eliminated.
+ %%
+ %% 3. Other bug or limitation.
+
+ %%io:format("~P\n", [_Reason,20]),
+ failed;
+ {NewCode,St} ->
+ case is_opt_safe(Bl, NewCode, OldIs, St) of
+ false ->
+ %% The optimization is not safe. (A register
+ %% used by the instructions following the
+ %% optimized code is either not assigned a
+ %% value at all or assigned a different value.)
+
+ %%io:format("\nNot safe:\n"),
+ %%io:format("~p\n", [Bl]),
+ %%io:format("~p\n", [reverse(NewCode)]),
+ failed;
+ true -> {NewCode++Acc,St}
+ end
+ end
+ end.
+
+bopt_block_1(Block, Fail, St) ->
+ {Pre0,[{_,Tree}]} = bopt_tree(Block),
+ Pre = update_fail_label(Pre0, Fail, []),
+ bopt_cg(Tree, Fail, make_block(Pre, []), St).
+
+%% is_opt_safe(OriginalCode, OptCode, FollowingCode, State) -> true|false
+%% Comparing the original code to the optimized code, determine
+%% whether the optimized code is guaranteed to work in the same
+%% way as the original code.
+
+is_opt_safe(Bl, NewCode, OldIs, St) ->
+ %% Here are the conditions that must be true for the
+ %% optimization to be safe.
+ %%
+ %% 1. Any register that was assigned a value in the original
+ %% code, but is not in the optimized code, must be guaranteed
+ %% to be KILLED in the following code. (NotSet below.)
+ %%
+ %% 2. Any register that is assigned a value in the optimized
+ %% code must be UNUSED in the following code. (NewDst, Set.)
+ %% (Possible future improvement: Registers that are known
+ %% to be assigned the SAME value in the original and optimized
+ %% code don't need to be unused in the following code.)
+
+ PrevDst = dst_regs(Bl),
+ NewDst = dst_regs(NewCode),
+ NotSet = ordsets:subtract(PrevDst, NewDst),
+
+ %% Note: The following line is an optimization. We don't need
+    %% to test the variables in NotSet for being unused, because
+ %% they will all be tested for being killed (a stronger condition
+ %% than being unused).
+
+ Set = ordsets:subtract(NewDst, NotSet),
+
+ all_killed(NotSet, OldIs, St) andalso
+ none_used(Set, OldIs, St).
+
+% update_fail_label([{set,_,_,{bif,_,{f,0}}}=I|Is], Fail, Acc) ->
+% update_fail_label(Is, Fail, [I|Acc]);
+update_fail_label([{set,Ds,As,{bif,N,{f,_}}}|Is], Fail, Acc) ->
+ update_fail_label(Is, Fail, [{set,Ds,As,{bif,N,{f,Fail}}}|Acc]);
+update_fail_label([], _, Acc) -> Acc.
+
+make_block([], Acc) -> Acc;
+make_block(Bl, Acc) -> [{block,Bl}|Acc].
+
+extend_block(BlAcc, Fail, [{protected,_,_,_}=Prot|OldAcc]) ->
+ extend_block([Prot|BlAcc], Fail, OldAcc);
+extend_block(BlAcc0, Fail, [{block,Is0}|OldAcc]=OldAcc0) ->
+ case extend_block_1(reverse(Is0), Fail, BlAcc0) of
+ {[],_} -> {BlAcc0,OldAcc0};
+ {BlAcc,[]} -> extend_block(BlAcc, Fail, OldAcc);
+ {BlAcc,Is} -> {BlAcc,[{block,Is}|OldAcc]}
+ end;
+extend_block(BlAcc, _, OldAcc) -> {BlAcc,OldAcc}.
+
+extend_block_1([{set,[_],_,{bif,_,{f,Fail}}}=I|Is], Fail, Acc) ->
+ extend_block_1(Is, Fail, [I|Acc]);
+extend_block_1([{set,[_],As,{bif,Bif,_}}=I|Is]=Is0, Fail, Acc) ->
+ case safe_bool_op(Bif, length(As)) of
+ false -> {Acc,reverse(Is0)};
+ true -> extend_block_1(Is, Fail, [I|Acc])
+ end;
+extend_block_1([_|_]=Is, _, Acc) -> {Acc,reverse(Is)};
+extend_block_1([], _, Acc) -> {Acc,[]}.
+
+split_block(Is0, Dst, Fail) ->
+ case reverse(Is0) of
+ [{'%live',_}|[{set,[Dst],_,_}|_]=Is] ->
+ split_block_1(Is, Fail);
+ [{set,[Dst],_,_}|_]=Is ->
+ split_block_1(Is, Fail);
+ _ -> failed
+ end.
+
+split_block_1(Is, Fail) ->
+ case split_block_2(Is, Fail, []) of
+ {[],_} -> failed;
+ {_,_}=Res -> Res
+ end.
+
+% split_block_2([{set,[_],_,{bif,_,{f,0}}}=I|Is], Fail, Acc) ->
+% split_block_2(Is, Fail, [I|Acc]);
+split_block_2([{set,[_],_,{bif,_,{f,Fail}}}=I|Is], Fail, Acc) ->
+ split_block_2(Is, Fail, [I|Acc]);
+split_block_2([{'%live',_}|Is], Fail, Acc) ->
+ split_block_2(Is, Fail, Acc);
+split_block_2(Is, _, Acc) -> {Acc,reverse(Is)}.
+
+dst_regs(Is) ->
+ dst_regs(Is, []).
+
+dst_regs([{block,Bl}|Is], Acc) ->
+ dst_regs(Bl, dst_regs(Is, Acc));
+dst_regs([{set,[D],_,{bif,_,{f,_}}}|Is], Acc) ->
+ dst_regs(Is, [D|Acc]);
+dst_regs([_|Is], Acc) ->
+ dst_regs(Is, Acc);
+dst_regs([], Acc) -> ordsets:from_list(Acc).
+
+all_killed([R|Rs], OldIs, St) ->
+ case is_killed(R, OldIs, St) of
+ false -> false;
+ true -> all_killed(Rs, OldIs, St)
+ end;
+all_killed([], _, _) -> true.
+
+none_used([R|Rs], OldIs, St) ->
+ case is_not_used(R, OldIs, St) of
+ false -> false;
+ true -> none_used(Rs, OldIs, St)
+ end;
+none_used([], _, _) -> true.
+
+bopt_tree(Block0) ->
+ Block = ssa_block(Block0),
+ Reg = free_variables(Block),
+ %%io:format("~p\n", [Block]),
+ %%io:format("~p\n", [Reg]),
+ Res = bopt_tree_1(Block, Reg, []),
+ %%io:format("~p\n", [Res]),
+ Res.
+
+bopt_tree_1([{set,[Dst],As0,{bif,'not',_}}|Is], Forest0, Pre) ->
+ {[Arg],Forest1} = bopt_bool_args(As0, Forest0),
+ Forest = gb_trees:enter(Dst, {'not',Arg}, Forest1),
+ bopt_tree_1(Is, Forest, Pre);
+bopt_tree_1([{set,[Dst],As0,{bif,'and',_}}|Is], Forest0, Pre) ->
+ {As,Forest1} = bopt_bool_args(As0, Forest0),
+ AndList = make_and_list(As),
+ Forest = gb_trees:enter(Dst, {'and',AndList}, Forest1),
+ bopt_tree_1(Is, Forest, Pre);
+bopt_tree_1([{set,[Dst],[L0,R0],{bif,'or',_}}|Is], Forest0, Pre) ->
+ L = gb_trees:get(L0, Forest0),
+ R = gb_trees:get(R0, Forest0),
+ Forest1 = gb_trees:delete(L0, gb_trees:delete(R0, Forest0)),
+ OrList = make_or_list([L,R]),
+ Forest = gb_trees:enter(Dst, {'or',OrList}, Forest1),
+ bopt_tree_1(Is, Forest, Pre);
+bopt_tree_1([{protected,[Dst],_,_}=Prot|Is], Forest0, Pre) ->
+ Forest = gb_trees:enter(Dst, Prot, Forest0),
+ bopt_tree_1(Is, Forest, Pre);
+bopt_tree_1([{set,[Dst],As,{bif,N,_}}=Bif|Is], Forest0, Pre) ->
+ Ar = length(As),
+ case safe_bool_op(N, Ar) of
+ false ->
+ bopt_good_args(As, Forest0),
+ Forest = gb_trees:enter(Dst, any, Forest0),
+ bopt_tree_1(Is, Forest, [Bif|Pre]);
+ true ->
+ bopt_good_args(As, Forest0),
+ Test = bif_to_test(Dst, N, As),
+ Forest = gb_trees:enter(Dst, Test, Forest0),
+ bopt_tree_1(Is, Forest, Pre)
+ end;
+bopt_tree_1([], Forest, Pre) ->
+ {Pre,[R || {_,V}=R <- gb_trees:to_list(Forest), V =/= any]}.
+
+safe_bool_op(internal_is_record, 3) -> true;
+safe_bool_op(N, Ar) ->
+ erl_internal:new_type_test(N, Ar) orelse erl_internal:comp_op(N, Ar).
+
+bopt_bool_args(As, Forest) ->
+ mapfoldl(fun bopt_bool_arg/2, Forest, As).
+
+bopt_bool_arg({T,_}=R, Forest) when T == x; T == y ->
+ {gb_trees:get(R, Forest),gb_trees:delete(R, Forest)};
+bopt_bool_arg(Term, Forest) ->
+ {Term,Forest}.
+
+bopt_good_args([A|As], Regs) ->
+ bopt_good_arg(A, Regs),
+ bopt_good_args(As, Regs);
+bopt_good_args([], _) -> ok.
+
+bopt_good_arg({x,_}=X, Regs) ->
+ case gb_trees:get(X, Regs) of
+ any -> ok;
+ _Other ->
+ %%io:format("not any: ~p: ~p\n", [X,_Other]),
+ exit(bad_contents)
+ end;
+bopt_good_arg(_, _) -> ok.
+
+bif_to_test(_, N, As) ->
+ bif_to_test(N, As).
+
+bif_to_test(internal_is_record, [_,_,_]=As) ->
+ {test,internal_is_record,fail,As};
+bif_to_test('=:=', As) -> {test,is_eq_exact,fail,As};
+bif_to_test('=/=', As) -> {test,is_ne_exact,fail,As};
+bif_to_test('==', As) -> {test,is_eq,fail,As};
+bif_to_test('/=', As) -> {test,is_ne,fail,As};
+bif_to_test('=<', [L,R]) -> {test,is_ge,fail,[R,L]};
+bif_to_test('>=', As) -> {test,is_ge,fail,As};
+bif_to_test('>', [L,R]) -> {test,is_lt,fail,[R,L]};
+bif_to_test('<', As) -> {test,is_lt,fail,As};
+bif_to_test(Name, [_]=As) ->
+ case erl_internal:new_type_test(Name, 1) of
+ false -> exit({bif_to_test,Name,As,failed});
+ true -> {test,Name,fail,As}
+ end.
+
+make_and_list([{'and',As}|Is]) ->
+ make_and_list(As++Is);
+make_and_list([I|Is]) ->
+ [I|make_and_list(Is)];
+make_and_list([]) -> [].
+
+make_or_list([{'or',As}|Is]) ->
+ make_or_list(As++Is);
+make_or_list([I|Is]) ->
+ [I|make_or_list(Is)];
+make_or_list([]) -> [].
+
+%% Code generation for a boolean tree.
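+%%
+%% For example (illustrative), the tree
+%%     {'and',[{test,is_atom,fail,[{x,0}]},{test,is_list,fail,[{x,1}]}]}
+%% simply emits both tests with the same failure label, while an
+%% {'or',...} node allocates a fresh success label via new_label/1.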
+
+bopt_cg({'not',Arg}, Fail, Acc, St) ->
+ I = bopt_cg_not(Arg),
+ bopt_cg(I, Fail, Acc, St);
+bopt_cg({'and',As}, Fail, Acc, St) ->
+ bopt_cg_and(As, Fail, Acc, St);
+bopt_cg({'or',As}, Fail, Acc, St0) ->
+ {Succ,St} = new_label(St0),
+ bopt_cg_or(As, Succ, Fail, Acc, St);
+bopt_cg({test,is_tuple_element,fail,[Tmp,Tuple,RecordTag]}, Fail, Acc, St) ->
+ {[{test,is_eq_exact,{f,Fail},[Tmp,RecordTag]},
+ {get_tuple_element,Tuple,0,Tmp}|Acc],St};
+bopt_cg({inverted_test,is_tuple_element,fail,[Tmp,Tuple,RecordTag]}, Fail, Acc, St) ->
+ {[{test,is_ne_exact,{f,Fail},[Tmp,RecordTag]},
+ {get_tuple_element,Tuple,0,Tmp}|Acc],St};
+bopt_cg({test,N,fail,As}, Fail, Acc, St) ->
+ Test = {test,N,{f,Fail},As},
+ {[Test|Acc],St};
+bopt_cg({inverted_test,N,fail,As}, Fail, Acc, St0) ->
+ {Lbl,St} = new_label(St0),
+ {[{label,Lbl},{jump,{f,Fail}},{test,N,{f,Lbl},As}|Acc],St};
+bopt_cg({protected,_,Bl0,{_,_,_}}, Fail, Acc, St0) ->
+ {Bl,St} = bopt_block_1(Bl0, Fail, St0),
+ {Bl++Acc,St};
+bopt_cg([_|_]=And, Fail, Acc, St) ->
+ bopt_cg_and(And, Fail, Acc, St).
+
+bopt_cg_not({'and',As0}) ->
+ As = [bopt_cg_not(A) || A <- As0],
+ {'or',As};
+bopt_cg_not({'or',As0}) ->
+ As = [bopt_cg_not(A) || A <- As0],
+ {'and',As};
+bopt_cg_not({test,Test,Fail,As}) ->
+ {inverted_test,Test,Fail,As}.
+
+bopt_cg_and([{atom,false}|_], Fail, _, St) ->
+ {[{jump,{f,Fail}}],St};
+bopt_cg_and([{atom,true}|Is], Fail, Acc, St) ->
+ bopt_cg_and(Is, Fail, Acc, St);
+bopt_cg_and([I|Is], Fail, Acc0, St0) ->
+ {Acc,St} = bopt_cg(I, Fail, Acc0, St0),
+ bopt_cg_and(Is, Fail, Acc, St);
+bopt_cg_and([], _, Acc, St) -> {Acc,St}.
+
+bopt_cg_or([I], Succ, Fail, Acc0, St0) ->
+ {Acc,St} = bopt_cg(I, Fail, Acc0, St0),
+ {[{label,Succ}|Acc],St};
+bopt_cg_or([I|Is], Succ, Fail, Acc0, St0) ->
+ {Lbl,St1} = new_label(St0),
+ {Acc,St} = bopt_cg(I, Lbl, Acc0, St1),
+ bopt_cg_or(Is, Succ, Fail, [{label,Lbl},{jump,{f,Succ}}|Acc], St).
+
+new_label(#st{next=LabelNum}=St) when is_integer(LabelNum) ->
+ {LabelNum,St#st{next=LabelNum+1}}.
+
+free_variables(Is) ->
+ E = gb_sets:empty(),
+ free_vars_1(Is, E, E).
+
+free_vars_1([{set,[Dst],As,{bif,_,_}}|Is], F0, N0) ->
+ F = gb_sets:union(F0, gb_sets:difference(var_list(As), N0)),
+ N = gb_sets:union(N0, var_list([Dst])),
+ free_vars_1(Is, F, N);
+free_vars_1([{protected,_,Pa,_}|Is], F, N) ->
+ free_vars_1(Pa++Is, F, N);
+free_vars_1([], F, _) ->
+ gb_trees:from_orddict([{K,any} || K <- gb_sets:to_list(F)]).
+
+var_list(Is) ->
+ var_list_1(Is, gb_sets:empty()).
+
+var_list_1([{x,_}=X|Is], D) ->
+ var_list_1(Is, gb_sets:add(X, D));
+var_list_1([_|Is], D) ->
+ var_list_1(Is, D);
+var_list_1([], D) -> D.
+
+%%%
+%%% Convert a block to Static Single Assignment (SSA) form.
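+%%% For example (sketch): if {x,0} is assigned twice within a block,
+%%%     {set,[{x,0}],[{x,1}],Op1}, {set,[{x,0}],[{x,0}],Op2}
+%%% the second assignment is given a fresh register,
+%%%     {set,[{x,0}],[{x,1}],Op1}, {set,[{x,N}],[{x,0}],Op2}
+%%% where N is one past the highest x register used by the original block.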
+%%%
+
+-record(ssa,
+ {live,
+ sub}).
+
+ssa_block(Is0) ->
+ Next = ssa_first_free(Is0, 0),
+ {Is,_} = ssa_block_1(Is0, #ssa{live=Next,sub=gb_trees:empty()}, []),
+ Is.
+
+ssa_block_1([{protected,[_],Pa0,Pb}|Is], Sub0, Acc) ->
+ {Pa,Sub} = ssa_block_1(Pa0, Sub0, []),
+ Dst = ssa_last_target(Pa),
+ ssa_block_1(Is, Sub, [{protected,[Dst],Pa,Pb}|Acc]);
+ssa_block_1([{set,[Dst],As,Bif}|Is], Sub0, Acc0) ->
+ Sub1 = ssa_in_use_list(As, Sub0),
+ Sub = ssa_assign(Dst, Sub1),
+ Acc = [{set,[ssa_sub(Dst, Sub)],ssa_sub_list(As, Sub0),Bif}|Acc0],
+ ssa_block_1(Is, Sub, Acc);
+ssa_block_1([], Sub, Acc) -> {reverse(Acc),Sub}.
+
+ssa_in_use_list(As, Sub) ->
+ foldl(fun ssa_in_use/2, Sub, As).
+
+ssa_in_use({x,_}=R, #ssa{sub=Sub0}=Ssa) ->
+ case gb_trees:is_defined(R, Sub0) of
+ true -> Ssa;
+ false ->
+ Sub = gb_trees:insert(R, R, Sub0),
+ Ssa#ssa{sub=Sub}
+ end;
+ssa_in_use(_, Ssa) -> Ssa.
+
+ssa_assign({x,_}=R, #ssa{sub=Sub0}=Ssa0) ->
+ case gb_trees:is_defined(R, Sub0) of
+ false ->
+ Sub = gb_trees:insert(R, R, Sub0),
+ Ssa0#ssa{sub=Sub};
+ true ->
+ {NewReg,Ssa} = ssa_new_reg(Ssa0),
+ Sub1 = gb_trees:update(R, NewReg, Sub0),
+ Sub = gb_trees:insert(NewReg, NewReg, Sub1),
+ Ssa#ssa{sub=Sub}
+ end;
+ssa_assign(_, Ssa) -> Ssa.
+
+ssa_sub_list(List, Sub) ->
+ [ssa_sub(E, Sub) || E <- List].
+
+ssa_sub(R0, #ssa{sub=Sub}) ->
+ case gb_trees:lookup(R0, Sub) of
+ none -> R0;
+ {value,R} -> R
+ end.
+
+ssa_new_reg(#ssa{live=Reg}=Ssa) ->
+ {{x,Reg},Ssa#ssa{live=Reg+1}}.
+
+ssa_first_free([{protected,Ds,_,_}|Is], Next0) ->
+ Next = ssa_first_free_list(Ds, Next0),
+ ssa_first_free(Is, Next);
+ssa_first_free([{set,[Dst],As,_}|Is], Next0) ->
+ Next = ssa_first_free_list([Dst|As], Next0),
+ ssa_first_free(Is, Next);
+ssa_first_free([], Next) -> Next.
+
+ssa_first_free_list(Regs, Next) ->
+ foldl(fun({x,R}, N) when R >= N -> R+1;
+ (_, N) -> N end, Next, Regs).
+
+ssa_last_target([{set,[Dst],_,_},{'%live',_}]) -> Dst;
+ssa_last_target([{set,[Dst],_,_}]) -> Dst;
+ssa_last_target([_|Is]) -> ssa_last_target(Is).
+
+%% index_instructions(FunctionIs) -> GbTree([{Label,Is}])
+%% Index the instruction sequence so that we can quickly
+%% look up the instruction following a specific label.
+
+index_instructions(Is) ->
+ ii_1(Is, []).
+
+ii_1([{label,Lbl}|Is0], Acc) ->
+ Is = lists:dropwhile(fun({label,_}) -> true;
+ (_) -> false end, Is0),
+ ii_1(Is0, [{Lbl,Is}|Acc]);
+ii_1([_|Is], Acc) ->
+ ii_1(Is, Acc);
+ii_1([], Acc) -> gb_trees:from_orddict(sort(Acc)).
+
+%% is_killed(Register, [Instruction], State) -> true|false
+%% Determine whether a register is killed in the instruction sequence.
+%% The state is used to allow us to determine the kill state
+%% across branches.
+
+is_killed(R, Is, St) ->
+ case is_killed_1(R, Is, St) of
+ false ->
+ %%io:format("nk ~p: ~P\n", [R,Is,15]),
+ false;
+ true -> true
+ end.
+
+is_killed_1(R, [{block,Blk}|Is], St) ->
+ case is_killed_1(R, Blk, St) of
+ true -> true;
+ false -> is_killed_1(R, Is, St)
+ end;
+is_killed_1(R, [{test,_,{f,Fail},As}|Is], St) ->
+ case not member(R, As) andalso is_reg_killed_at(R, Fail, St) of
+ false -> false;
+ true -> is_killed_1(R, Is, St)
+ end;
+is_killed_1(R, [{select_val,R,_,_}|_], _) -> false;
+is_killed_1(R, [{select_val,_,Fail,{list,Branches}}|_], St) ->
+ is_killed_at_all(R, [Fail|Branches], St);
+is_killed_1(R, [{jump,{f,F}}|_], St) ->
+ is_reg_killed_at(R, F, St);
+is_killed_1(Reg, Is, _) ->
+ beam_block:is_killed(Reg, Is).
+
+is_reg_killed_at(R, Lbl, #st{ll=Ll}=St) ->
+ Is = gb_trees:get(Lbl, Ll),
+ is_killed_1(R, Is, St).
+
+is_killed_at_all(R, [{f,Lbl}|T], St) ->
+ case is_reg_killed_at(R, Lbl, St) of
+ false -> false;
+ true -> is_killed_at_all(R, T, St)
+ end;
+is_killed_at_all(R, [_|T], St) ->
+ is_killed_at_all(R, T, St);
+is_killed_at_all(_, [], _) -> true.
+
+%% is_not_used(Register, [Instruction], State) -> true|false
+%% Determine whether a register is never used in the instruction sequence
+%%  (it could still be referenced by an allocate instruction, meaning that
+%% it MUST be initialized).
+%% The state is used to allow us to determine the usage state
+%% across branches.
+
+is_not_used(R, Is, St) ->
+ case is_not_used_1(R, Is, St) of
+ false ->
+ %%io:format("used ~p: ~P\n", [R,Is,15]),
+ false;
+ true -> true
+ end.
+
+is_not_used_1(R, [{block,Blk}|Is], St) ->
+ case is_not_used_1(R, Blk, St) of
+ true -> true;
+ false -> is_not_used_1(R, Is, St)
+ end;
+is_not_used_1(R, [{test,_,{f,Fail},As}|Is], St) ->
+ case not member(R, As) andalso is_reg_not_used_at(R, Fail, St) of
+ false -> false;
+ true -> is_not_used_1(R, Is, St)
+ end;
+is_not_used_1(R, [{select_val,R,_,_}|_], _) -> false;
+is_not_used_1(R, [{select_val,_,Fail,{list,Branches}}|_], St) ->
+ is_used_at_none(R, [Fail|Branches], St);
+is_not_used_1(R, [{jump,{f,F}}|_], St) ->
+ is_reg_not_used_at(R, F, St);
+is_not_used_1(Reg, Is, _) ->
+ beam_block:is_not_used(Reg, Is).
+
+is_reg_not_used_at(R, Lbl, #st{ll=Ll}=St) ->
+ Is = gb_trees:get(Lbl, Ll),
+ is_not_used_1(R, Is, St).
+
+is_used_at_none(R, [{f,Lbl}|T], St) ->
+ case is_reg_not_used_at(R, Lbl, St) of
+ false -> false;
+ true -> is_used_at_none(R, T, St)
+ end;
+is_used_at_none(R, [_|T], St) ->
+ is_used_at_none(R, T, St);
+is_used_at_none(_, [], _) -> true.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_clean.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_clean.erl
new file mode 100644
index 0000000000..d47ae9c896
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_clean.erl
@@ -0,0 +1,232 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_clean.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Purpose : Clean up, such as removing unused labels and unused functions.
+
+-module(beam_clean).
+
+-export([module/2]).
+-import(lists, [member/2,map/2,foldl/3,mapfoldl/3,reverse/1]).
+
+module({Mod,Exp,Attr,Fs0,_}, _Opt) ->
+ Order = [Lbl || {function,_,_,Lbl,_} <- Fs0],
+ All = foldl(fun({function,_,_,Lbl,_}=Func,D) -> dict:store(Lbl, Func, D) end,
+ dict:new(), Fs0),
+ {WorkList,Used0} = exp_to_labels(Fs0, Exp),
+ Used = find_all_used(WorkList, All, Used0),
+ Fs1 = remove_unused(Order, Used, All),
+ {Fs,Lc} = clean_labels(Fs1),
+ {ok,{Mod,Exp,Attr,Fs,Lc}}.
+
+%% Convert the export list ({Name,Arity} pairs) to a list of entry labels.
+
+exp_to_labels(Fs, Exp) -> exp_to_labels(Fs, Exp, [], sets:new()).
+
+exp_to_labels([{function,Name,Arity,Lbl,_}|Fs], Exp, Acc, Used) ->
+ case member({Name,Arity}, Exp) of
+ true -> exp_to_labels(Fs, Exp, [Lbl|Acc], sets:add_element(Lbl, Used));
+ false -> exp_to_labels(Fs, Exp, Acc, Used)
+ end;
+exp_to_labels([], _, Acc, Used) -> {Acc,Used}.
+
+%% Remove the unused functions.
+
+remove_unused([F|Fs], Used, All) ->
+ case sets:is_element(F, Used) of
+ false -> remove_unused(Fs, Used, All);
+ true -> [dict:fetch(F, All)|remove_unused(Fs, Used, All)]
+ end;
+remove_unused([], _, _) -> [].
+
+%% Find all used functions.
+
+find_all_used([F|Fs0], All, Used0) ->
+ {function,_,_,_,Code} = dict:fetch(F, All),
+ {Fs,Used} = update_work_list(Code, {Fs0,Used0}),
+ find_all_used(Fs, All, Used);
+find_all_used([], _All, Used) -> Used.
+
+update_work_list([{call,_,{f,L}}|Is], Sets) ->
+ update_work_list(Is, add_to_work_list(L, Sets));
+update_work_list([{call_last,_,{f,L},_}|Is], Sets) ->
+ update_work_list(Is, add_to_work_list(L, Sets));
+update_work_list([{call_only,_,{f,L}}|Is], Sets) ->
+ update_work_list(Is, add_to_work_list(L, Sets));
+update_work_list([{make_fun,{f,L},_,_}|Is], Sets) ->
+ update_work_list(Is, add_to_work_list(L, Sets));
+update_work_list([{make_fun2,{f,L},_,_,_}|Is], Sets) ->
+ update_work_list(Is, add_to_work_list(L, Sets));
+update_work_list([_|Is], Sets) ->
+ update_work_list(Is, Sets);
+update_work_list([], Sets) -> Sets.
+
+add_to_work_list(F, {Fs,Used}=Sets) ->
+ case sets:is_element(F, Used) of
+ true -> Sets;
+ false -> {[F|Fs],sets:add_element(F, Used)}
+ end.
+
+
+%%%
+%%% Coalesce adjacent labels. Renumber all labels to eliminate gaps.
+%%% This cleanup will slightly reduce file size and slightly speed up loading.
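+%%% For example (sketch): an input sequence such as
+%%%     {label,7}, {label,8}, ..., {jump,{f,8}}
+%%% maps both 7 and 8 to the same new label, and all surviving labels are
+%%% renumbered consecutively starting from 1.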
+%%%
+%%% We also expand internal_is_record/3 to a sequence of instructions. It is done
+%%% here merely because this module will always be called even if optimization
+%%% is turned off. We don't want to do the expansion in beam_asm because we
+%%% want to see the expanded code in a .S file.
+%%%
+
+-record(st, {lmap, %Translation tables for labels.
+ entry, %Number of entry label.
+ lc %Label counter
+ }).
+
+clean_labels(Fs0) ->
+ St0 = #st{lmap=dict:new(),lc=1},
+ {Fs1,#st{lmap=Lmap,lc=Lc}} = mapfoldl(fun function_renumber/2, St0, Fs0),
+ {map(fun(F) -> function_replace(F, Lmap) end, Fs1),Lc}.
+
+function_renumber({function,Name,Arity,_Entry,Asm0}, St0) ->
+ {Asm,St} = renumber_labels(Asm0, [], St0),
+ {{function,Name,Arity,St#st.entry,Asm},St}.
+
+renumber_labels([{bif,internal_is_record,{f,_},
+ [Term,Tag,{integer,Arity}],Dst}|Is], Acc, St) ->
+ ContLabel = 900000000+2*St#st.lc,
+ FailLabel = ContLabel+1,
+ Fail = {f,FailLabel},
+ Tmp = Dst,
+ renumber_labels([{test,is_tuple,Fail,[Term]},
+ {test,test_arity,Fail,[Term,Arity]},
+ {get_tuple_element,Term,0,Tmp},
+ {test,is_eq_exact,Fail,[Tmp,Tag]},
+ {move,{atom,true},Dst},
+ {jump,{f,ContLabel}},
+ {label,FailLabel},
+ {move,{atom,false},Dst},
+ {label,ContLabel}|Is], Acc, St);
+renumber_labels([{test,internal_is_record,{f,_}=Fail,
+ [Term,Tag,{integer,Arity}]}|Is], Acc, St) ->
+ Tmp = {x,1023},
+ case Term of
+ {Reg,_} when Reg == x; Reg == y ->
+ renumber_labels([{test,is_tuple,Fail,[Term]},
+ {test,test_arity,Fail,[Term,Arity]},
+ {get_tuple_element,Term,0,Tmp},
+ {test,is_eq_exact,Fail,[Tmp,Tag]}|Is], Acc, St);
+ _ ->
+ renumber_labels([{jump,Fail}|Is], Acc, St)
+ end;
+renumber_labels([{label,Old}|Is], [{label,New}|_]=Acc, #st{lmap=D0}=St) ->
+ D = dict:store(Old, New, D0),
+ renumber_labels(Is, Acc, St#st{lmap=D});
+renumber_labels([{label,Old}|Is], Acc, St0) ->
+ New = St0#st.lc,
+ D = dict:store(Old, New, St0#st.lmap),
+ renumber_labels(Is, [{label,New}|Acc], St0#st{lmap=D,lc=New+1});
+renumber_labels([{func_info,_,_,_}=Fi|Is], Acc, St0) ->
+ renumber_labels(Is, [Fi|Acc], St0#st{entry=St0#st.lc});
+renumber_labels([I|Is], Acc, St0) ->
+ renumber_labels(Is, [I|Acc], St0);
+renumber_labels([], Acc, St0) -> {Acc,St0}.
+
+function_replace({function,Name,Arity,Entry,Asm0}, Dict) ->
+ Asm = case catch replace(Asm0, [], Dict) of
+ {'EXIT',_}=Reason ->
+ exit(Reason);
+ {error,{undefined_label,Lbl}=Reason} ->
+ io:format("Function ~s/~w refers to undefined label ~w\n",
+ [Name,Arity,Lbl]),
+ exit(Reason);
+ Asm1 when list(Asm1) -> Asm1
+ end,
+ {function,Name,Arity,Entry,Asm}.
+
+replace([{test,Test,{f,Lbl},Ops}|Is], Acc, D) ->
+ replace(Is, [{test,Test,{f,label(Lbl, D)},Ops}|Acc], D);
+replace([{select_val,R,{f,Fail0},{list,Vls0}}|Is], Acc, D) ->
+ Vls1 = map(fun ({f,L}) -> {f,label(L, D)};
+ (Other) -> Other end, Vls0),
+ Fail = label(Fail0, D),
+ case redundant_values(Vls1, Fail, []) of
+ [] ->
+ %% Oops, no choices left. The loader will not accept that.
+ %% Convert to a plain jump.
+ replace(Is, [{jump,{f,Fail}}|Acc], D);
+ Vls ->
+ replace(Is, [{select_val,R,{f,Fail},{list,Vls}}|Acc], D)
+ end;
+replace([{select_tuple_arity,R,{f,Fail},{list,Vls0}}|Is], Acc, D) ->
+ Vls = map(fun ({f,L}) -> {f,label(L, D)};
+ (Other) -> Other end, Vls0),
+ replace(Is, [{select_tuple_arity,R,{f,label(Fail, D)},{list,Vls}}|Acc], D);
+replace([{'try',R,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{'try',R,{f,label(Lbl, D)}}|Acc], D);
+replace([{'catch',R,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{'catch',R,{f,label(Lbl, D)}}|Acc], D);
+replace([{jump,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{jump,{f,label(Lbl, D)}}|Acc], D);
+replace([{loop_rec,{f,Lbl},R}|Is], Acc, D) ->
+ replace(Is, [{loop_rec,{f,label(Lbl, D)},R}|Acc], D);
+replace([{loop_rec_end,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{loop_rec_end,{f,label(Lbl, D)}}|Acc], D);
+replace([{wait,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{wait,{f,label(Lbl, D)}}|Acc], D);
+replace([{wait_timeout,{f,Lbl},To}|Is], Acc, D) ->
+ replace(Is, [{wait_timeout,{f,label(Lbl, D)},To}|Acc], D);
+replace([{bif,Name,{f,Lbl},As,R}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bif,Name,{f,label(Lbl, D)},As,R}|Acc], D);
+replace([{call,Ar,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{call,Ar,{f,label(Lbl,D)}}|Acc], D);
+replace([{call_last,Ar,{f,Lbl},N}|Is], Acc, D) ->
+ replace(Is, [{call_last,Ar,{f,label(Lbl,D)},N}|Acc], D);
+replace([{call_only,Ar,{f,Lbl}}|Is], Acc, D) ->
+ replace(Is, [{call_only,Ar,{f,label(Lbl, D)}}|Acc], D);
+replace([{make_fun,{f,Lbl},U1,U2}|Is], Acc, D) ->
+ replace(Is, [{make_fun,{f,label(Lbl, D)},U1,U2}|Acc], D);
+replace([{make_fun2,{f,Lbl},U1,U2,U3}|Is], Acc, D) ->
+ replace(Is, [{make_fun2,{f,label(Lbl, D)},U1,U2,U3}|Acc], D);
+replace([{bs_init2,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_init2,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
+replace([{bs_put_integer,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_put_integer,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
+replace([{bs_put_binary,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_put_binary,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
+replace([{bs_put_float,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_put_float,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
+replace([{bs_final,{f,Lbl},R}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_final,{f,label(Lbl, D)},R}|Acc], D);
+replace([{bs_add,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_add,{f,label(Lbl, D)},Src,Dst}|Acc], D);
+replace([{bs_bits_to_bytes,{f,Lbl},Bits,Dst}|Is], Acc, D) when Lbl =/= 0 ->
+ replace(Is, [{bs_bits_to_bytes,{f,label(Lbl, D)},Bits,Dst}|Acc], D);
+replace([I|Is], Acc, D) ->
+ replace(Is, [I|Acc], D);
+replace([], Acc, _) -> Acc.
+
+label(Old, D) ->
+ case dict:find(Old, D) of
+ {ok,Val} -> Val;
+ error -> throw({error,{undefined_label,Old}})
+ end.
+
+redundant_values([_,{f,Fail}|Vls], Fail, Acc) ->
+ redundant_values(Vls, Fail, Acc);
+redundant_values([Val,Lbl|Vls], Fail, Acc) ->
+ redundant_values(Vls, Fail, [Lbl,Val|Acc]);
+redundant_values([], _, Acc) -> reverse(Acc).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_dict.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_dict.erl
new file mode 100644
index 0000000000..ddab957704
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_dict.erl
@@ -0,0 +1,196 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_dict.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Purpose : Maintain atom, import, and export tables for assembler.
+
+-module(beam_dict).
+
+-export([new/0, opcode/2, highest_opcode/1,
+ atom/2, local/4, export/4, import/4, string/2, lambda/5,
+ atom_table/1, local_table/1, export_table/1, import_table/1,
+ string_table/1,lambda_table/1]).
+
+-record(asm_dict,
+ {atoms = [], % [{Index, Atom}]
+ exports = [], % [{F, A, Label}]
+ locals = [], % [{F, A, Label}]
+	 imports = [],			% [{Index, {M, F, A}}]
+ strings = [], % Deep list of characters
+ lambdas = [], % [{...}]
+ next_atom = 1,
+ next_import = 0,
+ string_offset = 0,
+ highest_opcode = 0
+ }).
+
+new() ->
+ #asm_dict{}.
+
+%% Remembers highest opcode.
+
+opcode(Op, Dict) when Dict#asm_dict.highest_opcode > Op -> Dict;
+opcode(Op, Dict) -> Dict#asm_dict{highest_opcode=Op}.
+
+%% Returns the highest opcode encountered.
+
+highest_opcode(#asm_dict{highest_opcode=Op}) -> Op.
+
+%% Returns the index for an atom (adding it to the atom table if necessary).
+%% atom(Atom, Dict) -> {Index, Dict'}
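+%%
+%% For example (sketch):
+%%    D0 = beam_dict:new(),
+%%    {1,D1} = beam_dict:atom(foo, D0),
+%%    {1,D1} = beam_dict:atom(foo, D1)    % the same atom keeps its index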
+
+atom(Atom, Dict) when atom(Atom) ->
+ NextIndex = Dict#asm_dict.next_atom,
+ case lookup_store(Atom, Dict#asm_dict.atoms, NextIndex) of
+ {Index, _, NextIndex} ->
+ {Index, Dict};
+ {Index, Atoms, NewIndex} ->
+ {Index, Dict#asm_dict{atoms=Atoms, next_atom=NewIndex}}
+ end.
+
+%% Remembers an exported function.
+%% export(Func, Arity, Label, Dict) -> Dict'
+
+export(Func, Arity, Label, Dict0) when atom(Func), integer(Arity), integer(Label) ->
+ {Index, Dict1} = atom(Func, Dict0),
+ Dict1#asm_dict{exports = [{Index, Arity, Label}| Dict1#asm_dict.exports]}.
+
+%% Remembers a local function.
+%% local(Func, Arity, Label, Dict) -> Dict'
+
+local(Func, Arity, Label, Dict0) when atom(Func), integer(Arity), integer(Label) ->
+ {Index,Dict1} = atom(Func, Dict0),
+ Dict1#asm_dict{locals = [{Index,Arity,Label}| Dict1#asm_dict.locals]}.
+
+%% Returns the index for an import entry (adding it to the import table if necessary).
+%% import(Mod, Func, Arity, Dict) -> {Index, Dict'}
+
+import(Mod, Func, Arity, Dict) when atom(Mod), atom(Func), integer(Arity) ->
+ NextIndex = Dict#asm_dict.next_import,
+ case lookup_store({Mod, Func, Arity}, Dict#asm_dict.imports, NextIndex) of
+ {Index, _, NextIndex} ->
+ {Index, Dict};
+ {Index, Imports, NewIndex} ->
+ {_, D1} = atom(Mod, Dict#asm_dict{imports=Imports, next_import=NewIndex}),
+ {_, D2} = atom(Func, D1),
+ {Index, D2}
+ end.
+
+%% Returns the index for a string in the string table (adding the string to the
+%% table if necessary).
+%% string(String, Dict) -> {Offset, Dict'}
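+%%
+%% For example (sketch), starting from an empty pool D0:
+%%    {0,D1} = string("abc", D0),    % pool becomes "abc", offset now 3
+%%    {1,D1} = string("bc", D1)      % found inside "abc" at offset 1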
+
+string(Str, Dict) when list(Str) ->
+ #asm_dict{strings = Strings, string_offset = NextOffset} = Dict,
+ case old_string(Str, Strings) of
+ {true, Offset} ->
+ {Offset, Dict};
+ false ->
+ NewDict = Dict#asm_dict{strings = Strings++Str,
+ string_offset = NextOffset+length(Str)},
+ {NextOffset, NewDict}
+ end.
+
+%% Returns the index for a fun entry (adding it to the table if necessary).
+%% lambda(Lbl, Index, OldUniq, NumFree, Dict) -> {OldIndex,Dict'}
+
+lambda(Lbl, Index, OldUniq, NumFree, #asm_dict{lambdas=Lambdas0}=Dict) ->
+ OldIndex = length(Lambdas0),
+ Lambdas = [{Lbl,{OldIndex,Lbl,Index,NumFree,OldUniq}}|Lambdas0],
+ {OldIndex,Dict#asm_dict{lambdas=Lambdas}}.
+
+%% Returns the atom table.
+%% atom_table(Dict) -> [Length,AtomString...]
+
+atom_table(#asm_dict{atoms=Atoms, next_atom=NumAtoms}) ->
+ Sorted = lists:sort(Atoms),
+ Fun = fun({_, A}) ->
+ L = atom_to_list(A),
+ [length(L)|L]
+ end,
+ {NumAtoms-1, lists:map(Fun, Sorted)}.
+
+%% Returns the table of local functions.
+%% local_table(Dict) -> {NumLocals, [{Function, Arity, Label}...]}
+
+local_table(#asm_dict{locals = Locals}) ->
+ {length(Locals),Locals}.
+
+%% Returns the export table.
+%% export_table(Dict) -> {NumExports, [{Function, Arity, Label}...]}
+
+export_table(#asm_dict{exports = Exports}) ->
+ {length(Exports), Exports}.
+
+%% Returns the import table.
+%% import_table(Dict) -> {NumImports, [{Module, Function, Arity}...]}
+
+import_table(Dict) ->
+ #asm_dict{imports = Imports, next_import = NumImports} = Dict,
+ Sorted = lists:sort(Imports),
+ Fun = fun({_, {Mod, Func, Arity}}) ->
+ {Atom0, _} = atom(Mod, Dict),
+ {Atom1, _} = atom(Func, Dict),
+ {Atom0, Atom1, Arity}
+ end,
+ {NumImports, lists:map(Fun, Sorted)}.
+
+string_table(#asm_dict{strings = Strings, string_offset = Size}) ->
+ {Size, Strings}.
+
+lambda_table(#asm_dict{locals=Loc0,lambdas=Lambdas0}) ->
+ Lambdas1 = sofs:relation(Lambdas0),
+ Loc = sofs:relation([{Lbl,{F,A}} || {F,A,Lbl} <- Loc0]),
+ Lambdas2 = sofs:relative_product1(Lambdas1, Loc),
+ Lambdas = [<<F:32,A:32,Lbl:32,Index:32,NumFree:32,OldUniq:32>> ||
+ {{_,Lbl,Index,NumFree,OldUniq},{F,A}} <- sofs:to_external(Lambdas2)],
+ {length(Lambdas),Lambdas}.
+
+%%% Local helper functions.
+
+lookup_store(Key, Dict, NextIndex) ->
+ case catch lookup_store1(Key, Dict, NextIndex) of
+ Index when integer(Index) ->
+ {Index, Dict, NextIndex};
+ {Index, NewDict} ->
+ {Index, NewDict, NextIndex+1}
+ end.
+
+lookup_store1(Key, [Pair|Dict], NextIndex) when Key > element(2, Pair) ->
+ {Index, NewDict} = lookup_store1(Key, Dict, NextIndex),
+ {Index, [Pair|NewDict]};
+lookup_store1(Key, [{Index, Key}|_Dict], _NextIndex) ->
+ throw(Index);
+lookup_store1(Key, Dict, NextIndex) ->
+ {NextIndex, [{NextIndex, Key}|Dict]}.
+
+%% Search for string Str in the string pool Pool.
+%% old_string(Str, Pool) -> false | {true, Offset}
+
+old_string(Str, Pool) ->
+ old_string(Str, Pool, 0).
+
+old_string([C|Str], [C|Pool], Index) ->
+ case lists:prefix(Str, Pool) of
+ true ->
+ {true, Index};
+ false ->
+ old_string([C|Str], Pool, Index+1)
+ end;
+old_string(Str, [_|Pool], Index) ->
+ old_string(Str, Pool, Index+1);
+old_string(_Str, [], _Index) ->
+ false.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_disasm.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_disasm.erl
new file mode 100644
index 0000000000..451b83db66
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_disasm.erl
@@ -0,0 +1,964 @@
+%% -*- erlang-indent-level: 4 -*-
+%%=======================================================================
+%% File : beam_disasm.erl
+%% Author : Kostis Sagonas
+%% Description : Disassembles an R5-R10 .beam file into symbolic BEAM code
+%%=======================================================================
+%% $Id: beam_disasm.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%=======================================================================
+%% Notes:
+%% 1. It does NOT work for .beam files of previous BEAM versions.
+%% 2. If handling of new BEAM instructions is needed, this should be
+%% inserted at the end of function resolve_inst().
+%%=======================================================================
+
+-module(beam_disasm).
+
+-export([file/1, format_error/1]).
+
+-author("Kostis Sagonas").
+
+-include("beam_opcodes.hrl").
+
+%%-----------------------------------------------------------------------
+
+-define(NO_DEBUG(Str,Xs),ok).
+-define(DEBUG(Str,Xs),io:format(Str,Xs)).
+-define(exit(Reason),exit({?MODULE,?LINE,Reason})).
+
+%%-----------------------------------------------------------------------
+%% Error information
+
+format_error({error, Module, Error}) ->
+ Module:format_error(Error);
+format_error({internal, Error}) ->
+ io_lib:format("~p: disassembly failed with reason ~P.",
+ [?MODULE, Error, 25]).
+
+%%-----------------------------------------------------------------------
+%% The main exported function
+%% File is either a file name or a binary containing the code.
+%% Returns `{beam_file, [...]}' or `{error, Module, Reason}'.
+%% Call `format_error({error, Module, Reason})' for an error string.
+%%-----------------------------------------------------------------------
+
+file(File) ->
+ case beam_lib:info(File) of
+ Info when list(Info) ->
+ {value,{chunks,Chunks}} = lists:keysearch(chunks,1,Info),
+ case catch process_chunks(File, Chunks) of
+ {'EXIT', Error} ->
+ {error, ?MODULE, {internal, Error}};
+ Result ->
+ Result
+ end;
+ Error ->
+ Error
+ end.
+
+%%-----------------------------------------------------------------------
+%% Interface might need to be revised -- do not depend on it.
+%%-----------------------------------------------------------------------
+
+process_chunks(F,ChunkInfoList) ->
+ {ok,{_,Chunks}} = beam_lib:chunks(F, ["Atom","Code","StrT","ImpT","ExpT"]),
+ [{"Atom",AtomBin},{"Code",CodeBin},{"StrT",StrBin},
+ {"ImpT",ImpBin},{"ExpT",ExpBin}] = Chunks,
+ LambdaBin = optional_chunk(F, "FunT", ChunkInfoList),
+ LocBin = optional_chunk(F, "LocT", ChunkInfoList),
+ AttrBin = optional_chunk(F, "Attr", ChunkInfoList),
+ CompBin = optional_chunk(F, "CInf", ChunkInfoList),
+ Atoms = beam_disasm_atoms(AtomBin),
+ Exports = beam_disasm_exports(ExpBin, Atoms),
+ Imports = beam_disasm_imports(ImpBin, Atoms),
+ LocFuns = beam_disasm_exports(LocBin, Atoms),
+ Lambdas = beam_disasm_lambdas(LambdaBin, Atoms),
+ Str = beam_disasm_strings(StrBin),
+ Str1 = binary_to_list(Str), %% for debugging -- use Str as far as poss.
+ Sym_Code = beam_disasm_code(CodeBin,Atoms,Imports,Str,Lambdas),
+ Attributes = beam_disasm_attributes(AttrBin),
+ CompInfo = beam_disasm_compilation_info(CompBin),
+ All = [{exports,Exports},
+ {imports,Imports},
+ {code,Sym_Code},
+ {atoms,Atoms},
+ {local_funs,LocFuns},
+ {strings,Str1},
+ {attributes,Attributes},
+ {comp_info,CompInfo}],
+ {beam_file,[Item || {_Key,Data}=Item <- All, Data =/= none]}.
+
+%%-----------------------------------------------------------------------
+%% Retrieve an optional chunk or none if the chunk doesn't exist.
+%%-----------------------------------------------------------------------
+
+optional_chunk(F, ChunkTag, ChunkInfo) ->
+ case lists:keymember(ChunkTag, 1, ChunkInfo) of
+ true ->
+ {ok,{_,[{ChunkTag,Chunk}]}} = beam_lib:chunks(F, [ChunkTag]),
+ Chunk;
+ false -> none
+ end.
+
+%%-----------------------------------------------------------------------
+%% UTILITIES -- these actually exist in file "beam_lib"
+%% -- they should be moved into a common utils file.
+%%-----------------------------------------------------------------------
+
+i32([X1,X2,X3,X4]) ->
+ (X1 bsl 24) bor (X2 bsl 16) bor (X3 bsl 8) bor X4.
+
+get_int(B) ->
+ {I, B1} = split_binary(B, 4),
+ {i32(binary_to_list(I)), B1}.
+
+%%-----------------------------------------------------------------------
+%% Disassembles the atom table of a BEAM file.
+%% - atoms are stored in order 1 ... N (N = Num_atoms, in fact),
+%% - each atom name consists of a length byte, followed by that many
+%% bytes of name
+%% (nb: atom names max 255 chars?!)
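+%%  For example (sketch), a chunk whose bytes are
+%%      [0,0,0,2, 3,$f,$o,$o, 3,$b,$a,$r]
+%%  decodes to [{1,foo},{2,bar}].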
+%%-----------------------------------------------------------------------
+
+beam_disasm_atoms(AtomTabBin) ->
+ {_NumAtoms,B} = get_int(AtomTabBin),
+ disasm_atoms(B).
+
+disasm_atoms(AtomBin) ->
+ disasm_atoms(binary_to_list(AtomBin),1).
+
+disasm_atoms([Len|Xs],N) ->
+ {AtomName,Rest} = get_atom_name(Len,Xs),
+ [{N,list_to_atom(AtomName)}|disasm_atoms(Rest,N+1)];
+disasm_atoms([],_) ->
+ [].
+
+get_atom_name(Len,Xs) ->
+ get_atom_name(Len,Xs,[]).
+
+get_atom_name(N,[X|Xs],RevName) when N > 0 ->
+ get_atom_name(N-1,Xs,[X|RevName]);
+get_atom_name(0,Xs,RevName) ->
+ { lists:reverse(RevName), Xs }.
+
+%%-----------------------------------------------------------------------
+%% Disassembles the export table of a BEAM file.
+%%-----------------------------------------------------------------------
+
+beam_disasm_exports(none, _) -> none;
+beam_disasm_exports(ExpTabBin, Atoms) ->
+ {_NumAtoms,B} = get_int(ExpTabBin),
+ disasm_exports(B,Atoms).
+
+disasm_exports(Bin,Atoms) ->
+ resolve_exports(collect_exports(binary_to_list(Bin)),Atoms).
+
+collect_exports([F3,F2,F1,F0,A3,A2,A1,A0,L3,L2,L1,L0|Exps]) ->
+ [{i32([F3,F2,F1,F0]), % F = function (atom ID)
+ i32([A3,A2,A1,A0]), % A = arity (int)
+ i32([L3,L2,L1,L0])} % L = label (int)
+ |collect_exports(Exps)];
+collect_exports([]) ->
+ [].
+
+resolve_exports(Exps,Atoms) ->
+ [ {lookup_key(F,Atoms), A, L} || {F,A,L} <- Exps ].
+
+%%-----------------------------------------------------------------------
+%% Disassembles the import table of a BEAM file.
+%%-----------------------------------------------------------------------
+
+beam_disasm_imports(ExpTabBin,Atoms) ->
+ {_NumAtoms,B} = get_int(ExpTabBin),
+ disasm_imports(B,Atoms).
+
+disasm_imports(Bin,Atoms) ->
+ resolve_imports(collect_imports(binary_to_list(Bin)),Atoms).
+
+collect_imports([M3,M2,M1,M0,F3,F2,F1,F0,A3,A2,A1,A0|Exps]) ->
+ [{i32([M3,M2,M1,M0]), % M = module (atom ID)
+ i32([F3,F2,F1,F0]), % F = function (atom ID)
+ i32([A3,A2,A1,A0])} % A = arity (int)
+ |collect_imports(Exps)];
+collect_imports([]) ->
+ [].
+
+resolve_imports(Exps,Atoms) ->
+ [{extfunc,lookup_key(M,Atoms),lookup_key(F,Atoms),A} || {M,F,A} <- Exps ].
+
+%%-----------------------------------------------------------------------
+%% Disassembles the lambda (fun) table of a BEAM file.
+%%-----------------------------------------------------------------------
+
+beam_disasm_lambdas(none, _) -> none;
+beam_disasm_lambdas(<<_:32,Tab/binary>>, Atoms) ->
+ disasm_lambdas(Tab, Atoms, 0).
+
+disasm_lambdas(<<F:32,A:32,Lbl:32,Index:32,NumFree:32,OldUniq:32,More/binary>>,
+ Atoms, OldIndex) ->
+ Info = {lookup_key(F, Atoms),A,Lbl,Index,NumFree,OldUniq},
+ [{OldIndex,Info}|disasm_lambdas(More, Atoms, OldIndex+1)];
+disasm_lambdas(<<>>, _, _) -> [].
+
+%%-----------------------------------------------------------------------
+%% Disassembles the code chunk of a BEAM file:
+%% - The code is first disassembled into a long list of instructions.
+%% - This list is then split into functions and all names are resolved.
+%%-----------------------------------------------------------------------
+
+beam_disasm_code(CodeBin,Atoms,Imports,Str,Lambdas) ->
+ [_SS3,_SS2,_SS1,_SS0, % Sub-Size (length of information before code)
+ _IS3,_IS2,_IS1,_IS0, % Instruction Set Identifier (always 0)
+ _OM3,_OM2,_OM1,_OM0, % Opcode Max
+ _L3,_L2,_L1,_L0,_F3,_F2,_F1,_F0|Code] = binary_to_list(CodeBin),
+ case catch disasm_code(Code, Atoms) of
+ {'EXIT',Rsn} ->
+ ?NO_DEBUG('code disasm failed: ~p~n',[Rsn]),
+ ?exit(Rsn);
+ DisasmCode ->
+ Functions = get_function_chunks(DisasmCode),
+ LocLabels = local_labels(Functions),
+ [resolve_names(F,Imports,Str,LocLabels,Lambdas) || F <- Functions]
+ end.
+
+%%-----------------------------------------------------------------------
+
+disasm_code([B|Bs], Atoms) ->
+ {Instr,RestBs} = disasm_instr(B, Bs, Atoms),
+ [Instr|disasm_code(RestBs, Atoms)];
+disasm_code([], _) -> [].
+
+%%-----------------------------------------------------------------------
+%% Splits the code stream into chunks representing the code of functions.
+%%
+%% NOTE: code actually looks like
+%% label L1: ... label Ln:
+%% func_info ...
+%% label entry:
+%% ...
+%% <on failure, use label Li to show where things died>
+%% ...
+%% So the labels before each func_info should be included as well.
+%% Ideally, only one such label is needed, but the BEAM compiler
+%% before R8 didn't care to remove the redundant ones.
+%%-----------------------------------------------------------------------
+
+get_function_chunks([I|Code]) ->
+ {LastI,RestCode,Labs} = split_head_labels(I,Code,[]),
+ get_funs(LastI,RestCode,Labs,[]);
+get_function_chunks([]) ->
+ ?exit(empty_code_segment).
+
+get_funs(PrevI,[I|Is],RevF,RevFs) ->
+ case I of
+ {func_info,_Info} ->
+ [H|T] = RevF,
+ {Last,Fun,TrailingLabels} = split_head_labels(H,T,[]),
+ get_funs(I, Is, [PrevI|TrailingLabels], add_funs([Last|Fun],RevFs));
+ _ ->
+ get_funs(I, Is, [PrevI|RevF], RevFs)
+ end;
+get_funs(PrevI,[],RevF,RevFs) ->
+ case PrevI of
+ {int_code_end,[]} ->
+ emit_funs(add_fun(RevF,RevFs));
+ _ ->
+ ?DEBUG('warning: code segment did not end with int_code_end~n',[]),
+ emit_funs(add_funs([PrevI|RevF],RevFs))
+ end.
+
+split_head_labels({label,L},[I|Code],Labs) ->
+ split_head_labels(I,Code,[{label,L}|Labs]);
+split_head_labels(I,Code,Labs) ->
+ {I,Code,Labs}.
+
+add_fun([],Fs) ->
+ Fs;
+add_fun(F,Fs) ->
+ add_funs(F,Fs).
+
+add_funs(F,Fs) ->
+ [ lists:reverse(F) | Fs ].
+
+emit_funs(Fs) ->
+ lists:reverse(Fs).
+
+%%-----------------------------------------------------------------------
+%% Collects local labels -- I am not sure this is 100% what is needed.
+%%-----------------------------------------------------------------------
+
+local_labels(Funs) ->
+ [local_label(Fun) || Fun <- Funs].
+
+%% The first clause below attempts to provide some (limited form of)
+%% backwards compatibility; it is not needed for .beam files generated
+%% by the R8 compiler. The clause should one fine day be taken out.
+local_label([{label,_},{label,L}|Code]) ->
+ local_label([{label,L}|Code]);
+local_label([{label,_},
+ {func_info,[M0,F0,{u,A}]},
+ {label,[{u,L1}]}|_]) ->
+ {atom,M} = resolve_arg(M0),
+ {atom,F} = resolve_arg(F0),
+ {L1, {M, F, A}};
+local_label(Code) ->
+ io:format('beam_disasm: no label in ~p~n', [Code]),
+ {-666,{none,none,0}}.
+
+%%-----------------------------------------------------------------------
+%% Disassembles a single BEAM instruction; most instructions are handled
+%% in a generic way; indexing instructions are handled separately.
+%%-----------------------------------------------------------------------
+
+disasm_instr(B, Bs, Atoms) ->
+ {SymOp,Arity} = beam_opcodes:opname(B),
+ case SymOp of
+ select_val ->
+ disasm_select_inst(select_val, Bs, Atoms);
+ select_tuple_arity ->
+ disasm_select_inst(select_tuple_arity, Bs, Atoms);
+ _ ->
+ case catch decode_n_args(Arity, Bs, Atoms) of
+ {'EXIT',Rsn} ->
+ ?NO_DEBUG("decode_n_args(~p,~p) failed~n",[Arity,Bs]),
+ {{'EXIT',{SymOp,Arity,Rsn}},[]};
+ {Args,RestBs} ->
+ ?NO_DEBUG("instr ~p~n",[{SymOp,Args}]),
+ {{SymOp,Args}, RestBs}
+ end
+ end.
+
+%%-----------------------------------------------------------------------
+%% Disassembles a BEAM select_* instruction used for indexing.
+%% Currently handles {select_val,3} and {select_tuple_arity,3} insts.
+%%
+%% The arguments of a "select"-type instruction look as follows:
+%% <reg>, {f,FailLabel}, {list, <num cases>, [<case1> ... <caseN>]}
+%% where each case is of the form [symbol,{f,Label}].
+%%-----------------------------------------------------------------------
+
+disasm_select_inst(Inst, Bs, Atoms) ->
+ {X, Bs1} = decode_arg(Bs, Atoms),
+ {F, Bs2} = decode_arg(Bs1, Atoms),
+ {Z, Bs3} = decode_arg(Bs2, Atoms),
+ {U, Bs4} = decode_arg(Bs3, Atoms),
+ {u,Len} = U,
+ {List, RestBs} = decode_n_args(Len, Bs4, Atoms),
+ {{Inst,[X,F,{Z,U,List}]},RestBs}.
+
+%%-----------------------------------------------------------------------
+%% decode_arg([Byte]) -> { Arg, [Byte] }
+%%
+%% - an arg can have variable length, so we must return arg + remaining bytes
+%% - decodes an argument into its 'raw' form: { Tag, Value }
+%% several types map to a single tag, so the byte code instr must then
+%% assign a type to it
+%%-----------------------------------------------------------------------
+
+decode_arg([B|Bs]) ->
+ Tag = decode_tag(B band 2#111),
+ ?NO_DEBUG('Tag = ~p, B = ~p, Bs = ~p~n',[Tag,B,Bs]),
+ case Tag of
+ z ->
+ decode_z_tagged(Tag, B, Bs);
+ _ ->
+ %% all other cases are handled as if they were integers
+ decode_int(Tag, B, Bs)
+ end.
+
+decode_arg([B|Bs0], Atoms) ->
+ Tag = decode_tag(B band 2#111),
+ ?NO_DEBUG('Tag = ~p, B = ~p, Bs = ~p~n',[Tag,B,Bs]),
+ case Tag of
+ z ->
+ decode_z_tagged(Tag, B, Bs0);
+ a ->
+ %% atom or nil
+ case decode_int(Tag, B, Bs0) of
+ {{a,0},Bs} -> {nil,Bs};
+ {{a,I},Bs} -> {{atom,lookup_key(I, Atoms)},Bs}
+ end;
+ _ ->
+ %% all other cases are handled as if they were integers
+ decode_int(Tag, B, Bs0)
+ end.
+
+%%-----------------------------------------------------------------------
+%% Decodes an integer value. Handles positives, negatives, and bignums.
+%%
+%% Tries to do the opposite of:
+%% beam_asm:encode(1, 5) = [81]
+%% beam_asm:encode(1, 1000) = [105,232]
+%% beam_asm:encode(1, 2047) = [233,255]
+%% beam_asm:encode(1, 2048) = [25,8,0]
+%% beam_asm:encode(1,-1) = [25,255,255]
+%% beam_asm:encode(1,-4294967295) = [121,255,0,0,0,1]
+%% beam_asm:encode(1, 4294967295) = [121,0,255,255,255,255]
+%% beam_asm:encode(1, 429496729501) = [121,99,255,255,255,157]
+%%-----------------------------------------------------------------------
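+
+%% Worked example (added for illustration; the byte comes from the first
+%% encode line above): 81 = 2#01010001 has tag bits 2#001 (i) and bit 3
+%% clear, so its value is 81 bsr 4 = 5, and decode_arg([81]) returns
+%% {{i,5},[]}.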
+
+decode_int(Tag,B,Bs) when (B band 16#08) == 0 ->
+ %% N < 16 = 4 bits, NNNN:0:TTT
+ N = B bsr 4,
+ {{Tag,N},Bs};
+decode_int(Tag,B,Bs) when (B band 16#10) == 0 ->
+ %% N < 2048 = 11 bits = 3:8 bits, NNN:01:TTT, NNNNNNNN
+ [B1|Bs1] = Bs,
+ Val0 = B band 2#11100000,
+ N = (Val0 bsl 3) bor B1,
+ ?NO_DEBUG('NNN:01:TTT, NNNNNNNN = ~n~p:01:~p, ~p = ~p~n', [Val0,Tag,B,N]),
+ {{Tag,N},Bs1};
+decode_int(Tag,B,Bs) ->
+ {Len,Bs1} = decode_int_length(B,Bs),
+ {IntBs,RemBs} = take_bytes(Len,Bs1),
+ N = build_arg(IntBs),
+ [F|_] = IntBs,
+ Num = if F > 127, Tag == i -> decode_negative(N,Len);
+ true -> N
+ end,
+ ?NO_DEBUG('Len = ~p, IntBs = ~p, Num = ~p~n', [Len,IntBs,Num]),
+ {{Tag,Num},RemBs}.
+
+decode_int_length(B,Bs) ->
+ %% The following imitates get_erlang_integer() in beam_load.c
+ %% Len is the size of the integer value in bytes
+ case B bsr 5 of
+ 7 ->
+ {Arg,ArgBs} = decode_arg(Bs),
+ case Arg of
+ {u,L} ->
+ {L+9,ArgBs}; % 9 stands for 7+2
+ _ ->
+ ?exit({decode_int,weird_bignum_sublength,Arg})
+ end;
+ L ->
+ {L+2,Bs}
+ end.
+
+decode_negative(N,Len) ->
+ N - (1 bsl (Len*8)). % 8 is number of bits in a byte
+
+%%-----------------------------------------------------------------------
+%% Decodes lists and floating point numbers.
+%%-----------------------------------------------------------------------
+
+decode_z_tagged(Tag,B,Bs) when (B band 16#08) == 0 ->
+ N = B bsr 4,
+ case N of
+ 0 -> % float
+ decode_float(Bs);
+ 1 -> % list
+ {{Tag,N},Bs};
+ 2 -> % fr
+ decode_fr(Bs);
+ 3 -> % allocation list
+ decode_alloc_list(Bs);
+ _ ->
+ ?exit({decode_z_tagged,{invalid_extended_tag,N}})
+ end;
+decode_z_tagged(_,B,_) ->
+ ?exit({decode_z_tagged,{weird_value,B}}).
+
+decode_float(Bs) ->
+ {FL,RestBs} = take_bytes(8,Bs),
+ <<Float:64/float>> = list_to_binary(FL),
+ {{float,Float},RestBs}.
+
+decode_fr(Bs) ->
+ {{u,Fr},RestBs} = decode_arg(Bs),
+ {{fr,Fr},RestBs}.
+
+decode_alloc_list(Bs) ->
+ {{u,N},RestBs} = decode_arg(Bs),
+ decode_alloc_list_1(N, RestBs, []).
+
+decode_alloc_list_1(0, RestBs, Acc) ->
+ {{u,{alloc,lists:reverse(Acc)}},RestBs};
+decode_alloc_list_1(N, Bs0, Acc) ->
+ {{u,Type},Bs1} = decode_arg(Bs0),
+ {{u,Val},Bs} = decode_arg(Bs1),
+ case Type of
+ 0 ->
+ decode_alloc_list_1(N-1, Bs, [{words,Val}|Acc]);
+ 1 ->
+ decode_alloc_list_1(N-1, Bs, [{floats,Val}|Acc])
+ end.
+
+%%-----------------------------------------------------------------------
+%% take N bytes from a stream, return { Taken_bytes, Remaining_bytes }
+%%-----------------------------------------------------------------------
+
+take_bytes(N,Bs) ->
+ take_bytes(N,Bs,[]).
+
+take_bytes(N,[B|Bs],Acc) when N > 0 ->
+ take_bytes(N-1,Bs,[B|Acc]);
+take_bytes(0,Bs,Acc) ->
+ { lists:reverse(Acc), Bs }.
+
+%%-----------------------------------------------------------------------
+%% from a list of bytes Bn,Bn-1,...,B1,B0
+%% build (Bn << 8*n) bor ... bor B1 << 8 bor B0 << 0
+%%-----------------------------------------------------------------------
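+
+%% A small worked example (added for illustration):
+%%   build_arg([1,2]) = (1 bsl 8) bor 2 = 258.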
+
+build_arg(Bs) ->
+ build_arg(Bs,0).
+
+build_arg([B|Bs],N) ->
+ build_arg(Bs, (N bsl 8) bor B);
+build_arg([],N) ->
+ N.
+
+%%-----------------------------------------------------------------------
+%% Decodes a bunch of arguments and returns them in a list
+%%-----------------------------------------------------------------------
+
+decode_n_args(N, Bs, Atoms) when N >= 0 ->
+ decode_n_args(N, [], Bs, Atoms).
+
+decode_n_args(N, Acc, Bs0, Atoms) when N > 0 ->
+ {A1,Bs} = decode_arg(Bs0, Atoms),
+ decode_n_args(N-1, [A1|Acc], Bs, Atoms);
+decode_n_args(0, Acc, Bs, _) ->
+ {lists:reverse(Acc),Bs}.
+
+%%-----------------------------------------------------------------------
+%% Convert a numeric tag value into a symbolic one
+%%-----------------------------------------------------------------------
+
+decode_tag(?tag_u) -> u;
+decode_tag(?tag_i) -> i;
+decode_tag(?tag_a) -> a;
+decode_tag(?tag_x) -> x;
+decode_tag(?tag_y) -> y;
+decode_tag(?tag_f) -> f;
+decode_tag(?tag_h) -> h;
+decode_tag(?tag_z) -> z;
+decode_tag(X) -> ?exit({unknown_tag,X}).
+
+%%-----------------------------------------------------------------------
+%% - replace all references {a,I} with the atom with index I (or {atom,A})
+%% - replace all references to {i,K} in an external call position with
+%% the proper MFA (position in list, first elt = 0, yields MFA to use)
+%% - resolve strings, represented as <offset, length>, into their
+%%    actual values by using the string table
+%% (note: string table should be passed as a BINARY so that we can
+%% use binary_to_list/3!)
+%% - convert instruction to its readable form ...
+%%
+%% Currently, only the first three are done (systematically, at least).
+%%
+%% Note: It MAY be premature to remove the lists of args, since that
+%% representation means it is simpler to iterate over all args, etc.
+%%-----------------------------------------------------------------------
+
+resolve_names(Fun, Imports, Str, Lbls, Lambdas) ->
+ [resolve_inst(Instr, Imports, Str, Lbls, Lambdas) || Instr <- Fun].
+
+%%
+%% New make_fun2/4 instruction added in August 2001 (R8).
+%% We handle it specially here to avoid adding an argument to
+%% the clause for every instruction.
+%%
+
+resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) ->
+ [OldIndex] = resolve_args(Args),
+ {value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} =
+ lists:keysearch(OldIndex, 1, Lambdas),
+    [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy.
+ {make_fun2,{M,F,A},OldIndex,OldUniq,NumFree};
+resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) ->
+ resolve_inst(Instr, Imports, Str, Lbls).
+
+resolve_inst({label,[{u,L}]},_,_,_) ->
+ {label,L};
+resolve_inst({func_info,RawMFA},_,_,_) ->
+ {func_info,resolve_args(RawMFA)};
+% resolve_inst(int_code_end,_,_,_,_) -> % instruction already handled
+% int_code_end; % should not really be handled here
+resolve_inst({call,[{u,N},{f,L}]},_,_,Lbls) ->
+ {call,N,catch lookup_key(L,Lbls)};
+resolve_inst({call_last,[{u,N},{f,L},{u,U}]},_,_,Lbls) ->
+ {call_last,N,catch lookup_key(L,Lbls),U};
+resolve_inst({call_only,[{u,N},{f,L}]},_,_,Lbls) ->
+ {call_only,N,catch lookup_key(L,Lbls)};
+resolve_inst({call_ext,[{u,N},{u,MFAix}]},Imports,_,_) ->
+ {call_ext,N,catch lists:nth(MFAix+1,Imports)};
+resolve_inst({call_ext_last,[{u,N},{u,MFAix},{u,X}]},Imports,_,_) ->
+ {call_ext_last,N,catch lists:nth(MFAix+1,Imports),X};
+resolve_inst({bif0,Args},Imports,_,_) ->
+ [Bif,Reg] = resolve_args(Args),
+ {extfunc,_Mod,BifName,_Arity} = lists:nth(Bif+1,Imports),
+ %?NO_DEBUG('bif0(~p, ~p)~n',[BifName,Reg]),
+ {bif,BifName,nofail,[],Reg};
+resolve_inst({bif1,Args},Imports,_,_) ->
+ [F,Bif,A1,Reg] = resolve_args(Args),
+ {extfunc,_Mod,BifName,_Arity} = lists:nth(Bif+1,Imports),
+ %?NO_DEBUG('bif1(~p, ~p, ~p, ~p, ~p)~n',[Bif,BifName,F,[A1],Reg]),
+ {bif,BifName,F,[A1],Reg};
+resolve_inst({bif2,Args},Imports,_,_) ->
+ [F,Bif,A1,A2,Reg] = resolve_args(Args),
+ {extfunc,_Mod,BifName,_Arity} = lists:nth(Bif+1,Imports),
+ %?NO_DEBUG('bif2(~p, ~p, ~p, ~p, ~p)~n',[Bif,BifName,F,[A1,A2],Reg]),
+ {bif,BifName,F,[A1,A2],Reg};
+resolve_inst({allocate,[{u,X0},{u,X1}]},_,_,_) ->
+ {allocate,X0,X1};
+resolve_inst({allocate_heap,[{u,X0},{u,X1},{u,X2}]},_,_,_) ->
+ {allocate_heap,X0,X1,X2};
+resolve_inst({allocate_zero,[{u,X0},{u,X1}]},_,_,_) ->
+ {allocate_zero,X0,X1};
+resolve_inst({allocate_heap_zero,[{u,X0},{u,X1},{u,X2}]},_,_,_) ->
+ {allocate_heap_zero,X0,X1,X2};
+resolve_inst({test_heap,[{u,X0},{u,X1}]},_,_,_) ->
+ {test_heap,X0,X1};
+resolve_inst({init,[Dst]},_,_,_) ->
+ {init,Dst};
+resolve_inst({deallocate,[{u,L}]},_,_,_) ->
+ {deallocate,L};
+resolve_inst({return,[]},_,_,_) ->
+ return;
+resolve_inst({send,[]},_,_,_) ->
+ send;
+resolve_inst({remove_message,[]},_,_,_) ->
+ remove_message;
+resolve_inst({timeout,[]},_,_,_) ->
+ timeout;
+resolve_inst({loop_rec,[Lbl,Dst]},_,_,_) ->
+ {loop_rec,Lbl,Dst};
+resolve_inst({loop_rec_end,[Lbl]},_,_,_) ->
+ {loop_rec_end,Lbl};
+resolve_inst({wait,[Lbl]},_,_,_) ->
+ {wait,Lbl};
+resolve_inst({wait_timeout,[Lbl,Int]},_,_,_) ->
+ {wait_timeout,Lbl,resolve_arg(Int)};
+resolve_inst({m_plus,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'+',W,[SrcR1,SrcR2],DstR};
+resolve_inst({m_minus,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'-',W,[SrcR1,SrcR2],DstR};
+resolve_inst({m_times,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'*',W,[SrcR1,SrcR2],DstR};
+resolve_inst({m_div,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'/',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_div,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'div',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_rem,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'rem',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_band,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'band',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_bor,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'bor',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_bxor,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'bxor',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_bsl,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'bsl',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_bsr,Args},_,_,_) ->
+ [W,SrcR1,SrcR2,DstR] = resolve_args(Args),
+ {arithbif,'bsr',W,[SrcR1,SrcR2],DstR};
+resolve_inst({int_bnot,Args},_,_,_) ->
+ [W,SrcR,DstR] = resolve_args(Args),
+ {arithbif,'bnot',W,[SrcR],DstR};
+resolve_inst({is_lt=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_ge=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_eq=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_ne=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_eq_exact=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_ne_exact=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_integer=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_float=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_number=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_atom=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_pid=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_reference=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_port=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_nil=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_binary=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_constant=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_list=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_nonempty_list=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({is_tuple=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({test_arity=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({select_val,Args},_,_,_) ->
+ [Reg,FLbl,{{z,1},{u,_Len},List0}] = Args,
+ List = resolve_args(List0),
+ {select_val,Reg,FLbl,{list,List}};
+resolve_inst({select_tuple_arity,Args},_,_,_) ->
+ [Reg,FLbl,{{z,1},{u,_Len},List0}] = Args,
+ List = resolve_args(List0),
+ {select_tuple_arity,Reg,FLbl,{list,List}};
+resolve_inst({jump,[Lbl]},_,_,_) ->
+ {jump,Lbl};
+resolve_inst({'catch',[Dst,Lbl]},_,_,_) ->
+ {'catch',Dst,Lbl};
+resolve_inst({catch_end,[Dst]},_,_,_) ->
+ {catch_end,Dst};
+resolve_inst({move,[Src,Dst]},_,_,_) ->
+ {move,resolve_arg(Src),Dst};
+resolve_inst({get_list,[Src,Dst1,Dst2]},_,_,_) ->
+ {get_list,Src,Dst1,Dst2};
+resolve_inst({get_tuple_element,[Src,{u,Off},Dst]},_,_,_) ->
+ {get_tuple_element,resolve_arg(Src),Off,resolve_arg(Dst)};
+resolve_inst({set_tuple_element,[Src,Dst,{u,Off}]},_,_,_) ->
+ {set_tuple_element,resolve_arg(Src),resolve_arg(Dst),Off};
+resolve_inst({put_string,[{u,Len},{u,Off},Dst]},_,Strings,_) ->
+ String = if Len > 0 -> binary_to_list(Strings, Off+1, Off+Len);
+ true -> ""
+ end,
+    ?NO_DEBUG('put_string(~p, {string,~p}, ~p)~n',[Len,String,Dst]),
+ {put_string,Len,{string,String},Dst};
+resolve_inst({put_list,[Src1,Src2,Dst]},_,_,_) ->
+ {put_list,resolve_arg(Src1),resolve_arg(Src2),Dst};
+resolve_inst({put_tuple,[{u,Arity},Dst]},_,_,_) ->
+ {put_tuple,Arity,Dst};
+resolve_inst({put,[Src]},_,_,_) ->
+ {put,resolve_arg(Src)};
+resolve_inst({badmatch,[X]},_,_,_) ->
+ {badmatch,resolve_arg(X)};
+resolve_inst({if_end,[]},_,_,_) ->
+ if_end;
+resolve_inst({case_end,[X]},_,_,_) ->
+ {case_end,resolve_arg(X)};
+resolve_inst({call_fun,[{u,N}]},_,_,_) ->
+ {call_fun,N};
+resolve_inst({make_fun,Args},_,_,Lbls) ->
+ [{f,L},Magic,FreeVars] = resolve_args(Args),
+ {make_fun,catch lookup_key(L,Lbls),Magic,FreeVars};
+resolve_inst({is_function=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+resolve_inst({call_ext_only,[{u,N},{u,MFAix}]},Imports,_,_) ->
+ {call_ext_only,N,catch lists:nth(MFAix+1,Imports)};
+%%
+%% Instructions for handling binaries added in R7A & R7B
+%%
+resolve_inst({bs_start_match,[F,Reg]},_,_,_) ->
+ {bs_start_match,F,Reg};
+resolve_inst({bs_get_integer=I,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
+ [A2,A5] = resolve_args([Arg2,Arg5]),
+ {test,I,Lbl,[A2,N,decode_field_flags(U),A5]};
+resolve_inst({bs_get_float=I,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
+ [A2,A5] = resolve_args([Arg2,Arg5]),
+ {test,I,Lbl,[A2,N,decode_field_flags(U),A5]};
+resolve_inst({bs_get_binary=I,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
+ [A2,A5] = resolve_args([Arg2,Arg5]),
+ {test,I,Lbl,[A2,N,decode_field_flags(U),A5]};
+resolve_inst({bs_skip_bits,[Lbl,Arg2,{u,N},{u,U}]},_,_,_) ->
+ [A2] = resolve_args([Arg2]),
+ {test,bs_skip_bits,Lbl,[A2,N,decode_field_flags(U)]};
+resolve_inst({bs_test_tail,[F,{u,N}]},_,_,_) ->
+ {test,bs_test_tail,F,[N]};
+resolve_inst({bs_save,[{u,N}]},_,_,_) ->
+ {bs_save,N};
+resolve_inst({bs_restore,[{u,N}]},_,_,_) ->
+ {bs_restore,N};
+resolve_inst({bs_init,[{u,N},{u,U}]},_,_,_) ->
+ {bs_init,N,decode_field_flags(U)};
+resolve_inst({bs_final,[F,X]},_,_,_) ->
+ {bs_final,F,X};
+resolve_inst({bs_put_integer,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
+ [A2,A5] = resolve_args([Arg2,Arg5]),
+ {bs_put_integer,Lbl,A2,N,decode_field_flags(U),A5};
+resolve_inst({bs_put_binary,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
+ [A2,A5] = resolve_args([Arg2,Arg5]),
+ ?NO_DEBUG('bs_put_binary(~p,~p,~p,~p,~p})~n',[Lbl,A2,N,U,A5]),
+ {bs_put_binary,Lbl,A2,N,decode_field_flags(U),A5};
+resolve_inst({bs_put_float,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
+ [A2,A5] = resolve_args([Arg2,Arg5]),
+ ?NO_DEBUG('bs_put_float(~p,~p,~p,~p,~p})~n',[Lbl,A2,N,U,A5]),
+ {bs_put_float,Lbl,A2,N,decode_field_flags(U),A5};
+resolve_inst({bs_put_string,[{u,Len},{u,Off}]},_,Strings,_) ->
+ String = if Len > 0 -> binary_to_list(Strings, Off+1, Off+Len);
+ true -> ""
+ end,
+ ?NO_DEBUG('bs_put_string(~p, {string,~p})~n',[Len,String]),
+ {bs_put_string,Len,{string,String}};
+resolve_inst({bs_need_buf,[{u,N}]},_,_,_) ->
+ {bs_need_buf,N};
+
+%%
+%% Instructions for handling floating point numbers added in June 2001 (R8).
+%%
+resolve_inst({fclearerror,[]},_,_,_) ->
+ fclearerror;
+resolve_inst({fcheckerror,Args},_,_,_) ->
+ [Fail] = resolve_args(Args),
+ {fcheckerror,Fail};
+resolve_inst({fmove,Args},_,_,_) ->
+ [FR,Reg] = resolve_args(Args),
+ {fmove,FR,Reg};
+resolve_inst({fconv,Args},_,_,_) ->
+ [Reg,FR] = resolve_args(Args),
+ {fconv,Reg,FR};
+resolve_inst({fadd=I,Args},_,_,_) ->
+ [F,A1,A2,Reg] = resolve_args(Args),
+ {arithfbif,I,F,[A1,A2],Reg};
+resolve_inst({fsub=I,Args},_,_,_) ->
+ [F,A1,A2,Reg] = resolve_args(Args),
+ {arithfbif,I,F,[A1,A2],Reg};
+resolve_inst({fmul=I,Args},_,_,_) ->
+ [F,A1,A2,Reg] = resolve_args(Args),
+ {arithfbif,I,F,[A1,A2],Reg};
+resolve_inst({fdiv=I,Args},_,_,_) ->
+ [F,A1,A2,Reg] = resolve_args(Args),
+ {arithfbif,I,F,[A1,A2],Reg};
+resolve_inst({fnegate,Args},_,_,_) ->
+ [F,Arg,Reg] = resolve_args(Args),
+ {arithfbif,fnegate,F,[Arg],Reg};
+
+%%
+%% Instructions for try expressions added in January 2003 (R10).
+%%
+
+resolve_inst({'try',[Reg,Lbl]},_,_,_) -> % analogous to 'catch'
+ {'try',Reg,Lbl};
+resolve_inst({try_end,[Reg]},_,_,_) -> % analogous to 'catch_end'
+ {try_end,Reg};
+resolve_inst({try_case,[Reg]},_,_,_) -> % analogous to 'catch_end'
+ {try_case,Reg};
+resolve_inst({try_case_end,[Reg]},_,_,_) ->
+ {try_case_end,Reg};
+resolve_inst({raise,[Reg1,Reg2]},_,_,_) ->
+ {bif,raise,{f,0},[Reg1,Reg2],{x,0}};
+
+%%
+%% New bit syntax instructions added in February 2004 (R10B).
+%%
+
+resolve_inst({bs_init2,[Lbl,Arg2,{u,W},{u,R},{u,F},Arg6]},_,_,_) ->
+ [A2,A6] = resolve_args([Arg2,Arg6]),
+ {bs_init2,Lbl,A2,W,R,decode_field_flags(F),A6};
+resolve_inst({bs_bits_to_bytes,[Lbl,Arg2,Arg3]},_,_,_) ->
+ [A2,A3] = resolve_args([Arg2,Arg3]),
+ {bs_bits_to_bytes,Lbl,A2,A3};
+resolve_inst({bs_add=I,[Lbl,Arg2,Arg3,Arg4,Arg5]},_,_,_) ->
+ [A2,A3,A4,A5] = resolve_args([Arg2,Arg3,Arg4,Arg5]),
+ {I,Lbl,[A2,A3,A4],A5};
+
+%%
+%% New apply instructions added in April 2004 (R10B).
+%%
+resolve_inst({apply,[{u,Arity}]},_,_,_) ->
+ {apply,Arity};
+resolve_inst({apply_last,[{u,Arity},{u,D}]},_,_,_) ->
+ {apply_last,Arity,D};
+
+%%
+%% New test instruction added in April 2004 (R10B).
+%%
+resolve_inst({is_boolean=I,Args0},_,_,_) ->
+ [L|Args] = resolve_args(Args0),
+ {test,I,L,Args};
+
+%%
+%% Catches instructions that are not yet handled.
+%%
+
+resolve_inst(X,_,_,_) -> ?exit({resolve_inst,X}).
+
+%%-----------------------------------------------------------------------
+%% Resolves arguments in a generic way.
+%%-----------------------------------------------------------------------
+
+resolve_args(Args) -> [resolve_arg(A) || A <- Args].
+
+resolve_arg({u,N}) -> N;
+resolve_arg({i,N}) -> {integer,N};
+resolve_arg({atom,Atom}=A) when is_atom(Atom) -> A;
+resolve_arg(nil) -> nil;
+resolve_arg(Arg) -> Arg.
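+
+%% For example (added for illustration):
+%%   resolve_args([{u,2},{i,-1},{atom,ok},{x,0}]) =
+%%       [2,{integer,-1},{atom,ok},{x,0}].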
+
+%%-----------------------------------------------------------------------
+%% The purpose of the following is just to add a hook for future changes.
+%% Currently, field flags are numbers 1-2-4-8 and only two of these
+%% numbers (BSF_LITTLE 2 -- BSF_SIGNED 4) have semantic significance;
+%% others are just hints for speeding up the execution; see "erl_bits.h".
+%%-----------------------------------------------------------------------
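+
+%% For example (added note): a little-endian signed segment carries the
+%% flags 2 bor 4 = 6, so decode_field_flags(6) yields {field_flags,6}.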
+
+decode_field_flags(FF) ->
+ {field_flags,FF}.
+
+%%-----------------------------------------------------------------------
+%% Each string is denoted in the assembled code by its offset into this
+%% binary. This binary contains all strings concatenated together.
+%%-----------------------------------------------------------------------
+
+beam_disasm_strings(Bin) ->
+ Bin.
+
+%%-----------------------------------------------------------------------
+%% Disassembles the attributes of a BEAM file.
+%%-----------------------------------------------------------------------
+
+beam_disasm_attributes(none) -> none;
+beam_disasm_attributes(AttrBin) -> binary_to_term(AttrBin).
+
+%%-----------------------------------------------------------------------
+%% Disassembles the compilation information of a BEAM file.
+%%-----------------------------------------------------------------------
+
+beam_disasm_compilation_info(none) -> none;
+beam_disasm_compilation_info(Bin) -> binary_to_term(Bin).
+
+%%-----------------------------------------------------------------------
+%% Private Utilities
+%%-----------------------------------------------------------------------
+
+%%-----------------------------------------------------------------------
+
+lookup_key(Key,[{Key,Val}|_]) ->
+ Val;
+lookup_key(Key,[_|KVs]) ->
+ lookup_key(Key,KVs);
+lookup_key(Key,[]) ->
+ ?exit({lookup_key,{key_not_found,Key}}).
+
+%%-----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_flatten.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_flatten.erl
new file mode 100644
index 0000000000..a9958f87cd
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_flatten.erl
@@ -0,0 +1,137 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_flatten.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Purpose : Converts intermediate assembly code to final format.
+
+-module(beam_flatten).
+
+-export([module/2]).
+-import(lists, [reverse/1,reverse/2,map/2]).
+
+module({Mod,Exp,Attr,Fs,Lc}, _Opt) ->
+ {ok,{Mod,Exp,Attr,map(fun function/1, Fs),Lc}}.
+
+function({function,Name,Arity,CLabel,Is0}) ->
+ Is1 = block(Is0),
+ Is = opt(Is1),
+ {function,Name,Arity,CLabel,Is}.
+
+block(Is) ->
+ block(Is, []).
+
+block([{block,Is0}|Is1], Acc) -> block(Is1, norm_block(Is0, Acc));
+block([I|Is], Acc) -> block(Is, [I|Acc]);
+block([], Acc) -> reverse(Acc).
+
+norm_block([{allocate,R,Alloc}|Is], Acc0) ->
+ case insert_alloc_in_bs_init(Acc0, Alloc) of
+ not_possible ->
+ norm_block(Is, reverse(norm_allocate(Alloc, R), Acc0));
+ Acc ->
+ norm_block(Is, Acc)
+ end;
+norm_block([I|Is], Acc) -> norm_block(Is, [norm(I)|Acc]);
+norm_block([], Acc) -> Acc.
+
+norm({set,[D],As,{bif,N}}) -> {bif,N,nofail,As,D};
+norm({set,[D],As,{bif,N,F}}) -> {bif,N,F,As,D};
+norm({set,[D],[S],move}) -> {move,S,D};
+norm({set,[D],[S],fmove}) -> {fmove,S,D};
+norm({set,[D],[S],fconv}) -> {fconv,S,D};
+norm({set,[D],[S1,S2],put_list}) -> {put_list,S1,S2,D};
+norm({set,[D],[],{put_tuple,A}}) -> {put_tuple,A,D};
+norm({set,[],[S],put}) -> {put,S};
+norm({set,[D],[],{put_string,L,S}}) -> {put_string,L,S,D};
+norm({set,[D],[S],{get_tuple_element,I}}) -> {get_tuple_element,S,I,D};
+norm({set,[],[S,D],{set_tuple_element,I}}) -> {set_tuple_element,S,D,I};
+norm({set,[D1,D2],[S],get_list}) -> {get_list,S,D1,D2};
+norm({set,[],[],remove_message}) -> remove_message;
+norm({set,[],[],fclearerror}) -> fclearerror;
+norm({set,[],[],fcheckerror}) -> {fcheckerror,{f,0}};
+norm({'%',_}=Comment) -> Comment;
+norm({'%live',R}) -> {'%live',R}.
+
+norm_allocate({_Zero,nostack,Nh,[]}, Regs) ->
+ [{test_heap,Nh,Regs}];
+norm_allocate({_Zero,nostack,Nh,Nf,[]}, Regs) ->
+ [{test_heap,alloc_list(Nh, Nf),Regs}];
+norm_allocate({zero,0,Nh,[]}, Regs) ->
+ norm_allocate({nozero,0,Nh,[]}, Regs);
+norm_allocate({zero,0,Nh,Nf,[]}, Regs) ->
+ norm_allocate({nozero,0,Nh,Nf,[]}, Regs);
+norm_allocate({zero,Ns,0,[]}, Regs) ->
+ [{allocate_zero,Ns,Regs}];
+norm_allocate({zero,Ns,Nh,[]}, Regs) ->
+ [{allocate_heap_zero,Ns,Nh,Regs}];
+norm_allocate({nozero,Ns,0,Inits}, Regs) ->
+ [{allocate,Ns,Regs}|Inits];
+norm_allocate({nozero,Ns,Nh,Inits}, Regs) ->
+ [{allocate_heap,Ns,Nh,Regs}|Inits];
+norm_allocate({nozero,Ns,Nh,Floats,Inits}, Regs) ->
+ [{allocate_heap,Ns,alloc_list(Nh, Floats),Regs}|Inits];
+norm_allocate({zero,Ns,Nh,Floats,Inits}, Regs) ->
+ [{allocate_heap_zero,Ns,alloc_list(Nh, Floats),Regs}|Inits].
+
+insert_alloc_in_bs_init([I|_]=Is, Alloc) ->
+ case is_bs_put(I) of
+ false ->
+ not_possible;
+ true ->
+ insert_alloc_1(Is, Alloc, [])
+ end.
+
+insert_alloc_1([{bs_init2,Fail,Bs,Ws,Regs,F,Dst}|Is], {_,nostack,Nh,Nf,[]}, Acc) ->
+ Al = alloc_list(Ws+Nh, Nf),
+ I = {bs_init2,Fail,Bs,Al,Regs,F,Dst},
+ reverse(Acc, [I|Is]);
+insert_alloc_1([I|Is], Alloc, Acc) ->
+ insert_alloc_1(Is, Alloc, [I|Acc]).
+
+is_bs_put({bs_put_integer,_,_,_,_,_}) -> true;
+is_bs_put({bs_put_float,_,_,_,_,_}) -> true;
+is_bs_put({bs_put_binary,_,_,_,_,_}) -> true;
+is_bs_put({bs_put_string,_,_}) -> true;
+is_bs_put(_) -> false.
+
+alloc_list(Words, Floats) ->
+ {alloc,[{words,Words},{floats,Floats}]}.
+
+
+%% opt(Is0) -> Is
+%% Simple peep-hole optimization to move a {move,Any,{x,0}} past
+%% any kill up to the next call instruction.
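+%%
+%%  Illustrative sketch (added, with hypothetical instructions): the
+%%  sequence
+%%    {move,{y,0},{x,0}}, {kill,{y,1}}, {call,...}
+%%  becomes
+%%    {kill,{y,1}}, {move,{y,0},{x,0}}, {call,...}
+%%  whereas a {kill,{y,0}} directly after the move blocks the swap, since
+%%  that kill hits the source of the move.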
+
+opt(Is) ->
+ opt_1(Is, []).
+
+opt_1([{move,_,{x,0}}=I|Is0], Acc0) ->
+ case move_past_kill(Is0, I, Acc0) of
+ impossible -> opt_1(Is0, [I|Acc0]);
+ {Is,Acc} -> opt_1(Is, Acc)
+ end;
+opt_1([I|Is], Acc) ->
+ opt_1(Is, [I|Acc]);
+opt_1([], Acc) -> reverse(Acc).
+
+move_past_kill([{'%live',_}|Is], Move, Acc) ->
+ move_past_kill(Is, Move, Acc);
+move_past_kill([{kill,Src}|_], {move,Src,_}, _) ->
+ impossible;
+move_past_kill([{kill,_}=I|Is], Move, Acc) ->
+ move_past_kill(Is, Move, [I|Acc]);
+move_past_kill(Is, Move, Acc) ->
+ {Is,[Move|Acc]}.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_jump.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_jump.erl
new file mode 100644
index 0000000000..fd005898b6
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_jump.erl
@@ -0,0 +1,477 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_jump.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%%% Purpose : Optimise jumps and remove unreachable code.
+
+-module(beam_jump).
+
+-export([module/2,module_labels/1,
+ is_unreachable_after/1,remove_unused_labels/1]).
+
+%%% The following optimisations are done:
+%%%
+%%% (1) This code with two identical instruction sequences
+%%%
+%%% L1: <Instruction sequence>
+%%% L2:
+%%% . . .
+%%% L3: <Instruction sequence>
+%%% L4:
+%%%
+%%% can be replaced with
+%%%
+%%% L1: jump L3
+%%% L2:
+%%% . . .
+%%% L3: <Instruction sequence>
+%%% L4
+%%%
+%%% Note: The instruction sequence must end with an instruction
+%%% such as a jump that never transfers control to the instruction
+%%% following it.
+%%%
+%%% (2) case_end, if_end, and badmatch, and function calls that cause an
+%%% exit (such as calls to exit/1) are moved to the end of the function.
+%%% The purpose is to allow further optimizations at the place from
+%%% which the code was moved.
+%%%
+%%% (3) Any unreachable code is removed. Unreachable code is code after
+%%% jump, call_last and other instructions which never transfer control
+%%% to the following instruction. Code is unreachable up to the next
+%%% *referenced* label. Note that the optimisations below might
+%%% generate more possibilities for removing unreachable code.
+%%%
+%%% (4) This code:
+%%% L1: jump L2
+%%% . . .
+%%% L2: ...
+%%%
+%%% will be changed to
+%%%
+%%% jump L2
+%%% . . .
+%%% L1:
+%%% L2: ...
+%%%
+%%% If the jump is unreachable, it will be removed according to (1).
+%%%
+%%% (5) In
+%%%
+%%% jump L1
+%%% L1:
+%%%
+%%% the jump will be removed.
+%%%
+%%% (6) If test instructions are used to skip a single jump instruction,
+%%% the test is inverted and the jump is eliminated (provided that
+%%% the test can be inverted). Example:
+%%%
+%%% is_eq L1 {x,1} {x,2}
+%%% jump L2
+%%% L1:
+%%%
+%%% will be changed to
+%%%
+%%% is_ne L2 {x,1} {x,2}
+%%%
+%%% (The label L1 will be retained if there were previous references to it.)
+%%%
+%%% (7) Some redundant uses of is_boolean/1 are optimized away.
+%%%
+%%% Terminology note: The optimisation done here is called unreachable-code
+%%% elimination, NOT dead-code elimination. Dead code elimination
+%%% means the removal of instructions that are executed, but have no visible
+%%% effect on the program state.
+%%%
+
+-import(lists, [reverse/1,reverse/2,map/2,mapfoldl/3,foldl/3,
+ last/1,foreach/2,member/2]).
+
+module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
+ Fs = map(fun function/1, Fs0),
+ {ok,{Mod,Exp,Attr,Fs,Lc}}.
+
+module_labels({Mod,Exp,Attr,Fs,Lc}) ->
+ {Mod,Exp,Attr,map(fun function_labels/1, Fs),Lc}.
+
+function_labels({function,Name,Arity,CLabel,Asm0}) ->
+ Asm = remove_unused_labels(Asm0),
+ {function,Name,Arity,CLabel,Asm}.
+
+function({function,Name,Arity,CLabel,Asm0}) ->
+ Asm1 = share(Asm0),
+ Asm2 = bopt(Asm1),
+ Asm3 = move(Asm2),
+ Asm4 = opt(Asm3, CLabel),
+ Asm = remove_unused_labels(Asm4),
+ {function,Name,Arity,CLabel,Asm}.
+
+%%%
+%%% (1) We try to share the code for identical code segments by replacing all
+%%% occurrences except the last with jumps to the last occurrence.
+%%%
+
+share(Is) ->
+ share_1(reverse(Is), gb_trees:empty(), [], []).
+
+share_1([{label,_}=Lbl|Is], Dict, [], Acc) ->
+ share_1(Is, Dict, [], [Lbl|Acc]);
+share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) ->
+ case is_unreachable_after(last(Seq)) of
+ false ->
+ share_1(Is, Dict0, [], [Lbl|Seq ++ Acc]);
+ true ->
+ case gb_trees:lookup(Seq, Dict0) of
+ none ->
+ Dict = gb_trees:insert(Seq, L, Dict0),
+ share_1(Is, Dict, [], [Lbl|Seq ++ Acc]);
+ {value,Label} ->
+ share_1(Is, Dict0, [], [Lbl,{jump,{f,Label}}|Acc])
+ end
+ end;
+share_1([{func_info,_,_,_}=I|Is], _, [], Acc) ->
+ Is++[I|Acc];
+share_1([I|Is], Dict, Seq, Acc) ->
+ case is_unreachable_after(I) of
+ false ->
+ share_1(Is, Dict, [I|Seq], Acc);
+ true ->
+ share_1(Is, Dict, [I], Acc)
+ end.
+
+%%%
+%%% (2) Move short code sequences ending in an instruction that causes an exit
+%%% to the end of the function.
+%%%
+
+move(Is) ->
+ move_1(Is, [], []).
+
+move_1([I|Is], End, Acc) ->
+ case is_exit_instruction(I) of
+ false -> move_1(Is, End, [I|Acc]);
+ true -> move_2(I, Is, End, Acc)
+ end;
+move_1([], End, Acc) ->
+ reverse(Acc, reverse(End)).
+
+move_2(Exit, Is, End, [{block,_},{label,_},{func_info,_,_,_}|_]=Acc) ->
+ move_1(Is, End, [Exit|Acc]);
+move_2(Exit, Is, End, [{kill,_Y}|Acc]) ->
+ move_2(Exit, Is, End, Acc);
+move_2(Exit, Is, End, [{block,_}=Blk,{label,_}=Lbl,Dead|More]=Acc) ->
+ case is_unreachable_after(Dead) of
+ false ->
+ move_1(Is, End, [Exit|Acc]);
+ true ->
+ move_1([Dead|Is], [Exit,Blk,Lbl|End], More)
+ end;
+move_2(Exit, Is, End, [{label,_}=Lbl,Dead|More]=Acc) ->
+ case is_unreachable_after(Dead) of
+ false ->
+ move_1(Is, End, [Exit|Acc]);
+ true ->
+ move_1([Dead|Is], [Exit,Lbl|End], More)
+ end;
+move_2(Exit, Is, End, Acc) ->
+ move_1(Is, End, [Exit|Acc]).
+
+%%%
+%%% (7) Remove redundant is_boolean tests.
+%%%
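+%%% For example (added note, mirroring opt_is_bool_1/3 below): an
+%%%   {test,is_boolean,{f,L},[R]}
+%%% placed right after {test,is_eq_exact,{f,L},[R,{atom,true}]} is
+%%% redundant and is dropped, while one placed right after
+%%% {test,is_ne_exact,{f,L},[R,{atom,true}]} lets the pair collapse into
+%%% a single {test,is_eq_exact,{f,L},[R,{atom,false}]}.
+%%%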
+
+bopt(Is) ->
+ bopt_1(Is, []).
+
+bopt_1([{test,is_boolean,_,_}=I|Is], Acc0) ->
+ case opt_is_bool(I, Acc0) of
+ no -> bopt_1(Is, [I|Acc0]);
+ yes -> bopt_1(Is, Acc0);
+ {yes,Acc} -> bopt_1(Is, Acc)
+ end;
+bopt_1([I|Is], Acc) -> bopt_1(Is, [I|Acc]);
+bopt_1([], Acc) -> reverse(Acc).
+
+opt_is_bool({test,is_boolean,{f,Lbl},[Reg]}, Acc) ->
+ opt_is_bool_1(Acc, Reg, Lbl).
+
+opt_is_bool_1([{test,is_eq_exact,{f,Lbl},[Reg,{atom,true}]}|_], Reg, Lbl) ->
+ %% Instruction not needed in this context.
+ yes;
+opt_is_bool_1([{test,is_ne_exact,{f,Lbl},[Reg,{atom,true}]}|Acc], Reg, Lbl) ->
+ %% Rewrite to shorter test.
+ {yes,[{test,is_eq_exact,{f,Lbl},[Reg,{atom,false}]}|Acc]};
+opt_is_bool_1([{test,_,{f,Lbl},_}=Test|Acc0], Reg, Lbl) ->
+ case opt_is_bool_1(Acc0, Reg, Lbl) of
+ {yes,Acc} -> {yes,[Test|Acc]};
+ Other -> Other
+ end;
+opt_is_bool_1(_, _, _) -> no.
+
+%%%
+%%% (3) (4) (5) (6) Jump and unreachable code optimizations.
+%%%
+
+-record(st, {fc, %Label for function class errors.
+ entry, %Entry label (must not be moved).
+ mlbl, %Moved labels.
+ labels %Set of referenced labels.
+ }).
+
+opt([{label,Fc}|_]=Is, CLabel) ->
+ Lbls = initial_labels(Is),
+ St = #st{fc=Fc,entry=CLabel,mlbl=dict:new(),labels=Lbls},
+ opt(Is, [], St).
+
+opt([{test,Test0,{f,Lnum}=Lbl,Ops}=I|Is0], Acc, St) ->
+ case Is0 of
+ [{jump,To}|[{label,Lnum}|Is2]=Is1] ->
+ case invert_test(Test0) of
+ not_possible ->
+ opt(Is0, [I|Acc], label_used(Lbl, St));
+ Test ->
+ Is = case is_label_used(Lnum, St) of
+ true -> Is1;
+ false -> Is2
+ end,
+ opt([{test,Test,To,Ops}|Is], Acc, label_used(To, St))
+ end;
+ _Other ->
+ opt(Is0, [I|Acc], label_used(Lbl, St))
+ end;
+opt([{select_val,_R,Fail,{list,Vls}}=I|Is], Acc, St) ->
+ skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St));
+opt([{select_tuple_arity,_R,Fail,{list,Vls}}=I|Is], Acc, St) ->
+ skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St));
+opt([{'try',_R,Lbl}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{'catch',_R,Lbl}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{label,L}=I|Is], Acc, #st{entry=L}=St) ->
+ %% NEVER move the entry label.
+ opt(Is, [I|Acc], St);
+opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) ->
+ St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)},
+ opt([Prev,I|Is], Acc, label_used({f,L2}, St));
+opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) ->
+ case dict:find(Lbl, Mlbl) of
+ {ok,Lbls} ->
+ %% Essential to remove the list of labels from the dictionary,
+ %% since we will rescan the inserted labels. We MUST rescan.
+ St = St0#st{mlbl=dict:erase(Lbl, Mlbl)},
+ insert_labels([Lbl|Lbls], Is, Acc, St);
+ error -> opt(Is, [I|Acc], St0)
+ end;
+opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) ->
+ opt([I|Is], Acc, St);
+opt([{jump,Lbl}=I|Is], Acc, St) ->
+ skip_unreachable(Is, [I|Acc], label_used(Lbl, St));
+opt([{loop_rec,Lbl,_R}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bif,_Name,Lbl,_As,_R}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_put_integer,Lbl,_Bits,_Unit,_Fl,_Val}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_put_binary,Lbl,_Bits,_Unit,_Fl,_Val}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_put_float,Lbl,_Bits,_Unit,_Fl,_Val}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_final,Lbl,_R}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_init2,Lbl,_,_,_,_,_}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_add,Lbl,_,_}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([{bs_bits_to_bytes,Lbl,_,_}=I|Is], Acc, St) ->
+ opt(Is, [I|Acc], label_used(Lbl, St));
+opt([I|Is], Acc, St) ->
+ case is_unreachable_after(I) of
+ true -> skip_unreachable(Is, [I|Acc], St);
+ false -> opt(Is, [I|Acc], St)
+ end;
+opt([], Acc, #st{fc=Fc,mlbl=Mlbl}) ->
+ Code = reverse(Acc),
+ case dict:find(Fc, Mlbl) of
+ {ok,Lbls} -> insert_fc_labels(Lbls, Mlbl, Code);
+ error -> Code
+ end.
+
+insert_fc_labels([L|Ls], Mlbl, Acc0) ->
+ Acc = [{label,L}|Acc0],
+ case dict:find(L, Mlbl) of
+ error ->
+ insert_fc_labels(Ls, Mlbl, Acc);
+ {ok,Lbls} ->
+ insert_fc_labels(Lbls++Ls, Mlbl, Acc)
+ end;
+insert_fc_labels([], _, Acc) -> Acc.
+
+%% invert_test(Test0) -> not_possible | Test
+
+invert_test(is_ge) -> is_lt;
+invert_test(is_lt) -> is_ge;
+invert_test(is_eq) -> is_ne;
+invert_test(is_ne) -> is_eq;
+invert_test(is_eq_exact) -> is_ne_exact;
+invert_test(is_ne_exact) -> is_eq_exact;
+invert_test(_) -> not_possible.
+
+insert_labels([L|Ls], Is, [{jump,{f,L}}|Acc], St) ->
+ insert_labels(Ls, [{label,L}|Is], Acc, St);
+insert_labels([L|Ls], Is, Acc, St) ->
+ insert_labels(Ls, [{label,L}|Is], Acc, St);
+insert_labels([], Is, Acc, St) ->
+ opt(Is, Acc, St).
+
+%% Skip unreachable code up to the next referenced label.
+
+skip_unreachable([{label,L}|Is], [{jump,{f,L}}|Acc], St) ->
+ opt([{label,L}|Is], Acc, St);
+skip_unreachable([{label,L}|Is], Acc, St) ->
+ case is_label_used(L, St) of
+ true -> opt([{label,L}|Is], Acc, St);
+ false -> skip_unreachable(Is, Acc, St)
+ end;
+skip_unreachable([_|Is], Acc, St) ->
+ skip_unreachable(Is, Acc, St);
+skip_unreachable([], Acc, St) ->
+ opt([], Acc, St).
+
+%% Add one or more labels to the set of used labels.
+
+label_used({f,0}, St) -> St;
+label_used({f,L}, St) -> St#st{labels=gb_sets:add(L, St#st.labels)};
+label_used([H|T], St0) -> label_used(T, label_used(H, St0));
+label_used([], St) -> St;
+label_used(_Other, St) -> St.
+
+%% Test if label is used.
+
+is_label_used(L, St) ->
+ gb_sets:is_member(L, St#st.labels).
+
+%% is_unreachable_after(Instruction) -> true|false
+%% Test whether the code after Instruction is unreachable.
+
+is_unreachable_after({func_info,_M,_F,_A}) -> true;
+is_unreachable_after(return) -> true;
+is_unreachable_after({call_ext_last,_Ar,_ExtFunc,_D}) -> true;
+is_unreachable_after({call_ext_only,_Ar,_ExtFunc}) -> true;
+is_unreachable_after({call_last,_Ar,_Lbl,_D}) -> true;
+is_unreachable_after({call_only,_Ar,_Lbl}) -> true;
+is_unreachable_after({apply_last,_Ar,_N}) -> true;
+is_unreachable_after({jump,_Lbl}) -> true;
+is_unreachable_after({select_val,_R,_Lbl,_Cases}) -> true;
+is_unreachable_after({select_tuple_arity,_R,_Lbl,_Cases}) -> true;
+is_unreachable_after({loop_rec_end,_}) -> true;
+is_unreachable_after({wait,_}) -> true;
+is_unreachable_after(I) -> is_exit_instruction(I).
+
+%% is_exit_instruction(Instruction) -> true|false
+%% Test whether the instruction Instruction always
+%% causes an exit/failure.
+
+is_exit_instruction({call_ext,_,{extfunc,M,F,A}}) ->
+ is_exit_instruction_1(M, F, A);
+is_exit_instruction({call_ext_last,_,{extfunc,M,F,A},_}) ->
+ is_exit_instruction_1(M, F, A);
+is_exit_instruction({call_ext_only,_,{extfunc,M,F,A}}) ->
+ is_exit_instruction_1(M, F, A);
+is_exit_instruction(if_end) -> true;
+is_exit_instruction({case_end,_}) -> true;
+is_exit_instruction({try_case_end,_}) -> true;
+is_exit_instruction({badmatch,_}) -> true;
+is_exit_instruction(_) -> false.
+
+is_exit_instruction_1(erlang, exit, 1) -> true;
+is_exit_instruction_1(erlang, throw, 1) -> true;
+is_exit_instruction_1(erlang, error, 1) -> true;
+is_exit_instruction_1(erlang, error, 2) -> true;
+is_exit_instruction_1(erlang, fault, 1) -> true;
+is_exit_instruction_1(erlang, fault, 2) -> true;
+is_exit_instruction_1(_, _, _) -> false.
+
+%% remove_unused_labels(Instructions0) -> Instructions
+%% Remove all unused labels.
+
+remove_unused_labels(Is) ->
+ Used0 = initial_labels(Is),
+ Used = foldl(fun ulbl/2, Used0, Is),
+ rem_unused(Is, Used, []).
+
+rem_unused([{label,Lbl}=I|Is], Used, Acc) ->
+ case gb_sets:is_member(Lbl, Used) of
+ false -> rem_unused(Is, Used, Acc);
+ true -> rem_unused(Is, Used, [I|Acc])
+ end;
+rem_unused([I|Is], Used, Acc) ->
+ rem_unused(Is, Used, [I|Acc]);
+rem_unused([], _, Acc) -> reverse(Acc).
+
+initial_labels(Is) ->
+ initial_labels(Is, []).
+
+initial_labels([{label,Lbl}|Is], Acc) ->
+ initial_labels(Is, [Lbl|Acc]);
+initial_labels([{func_info,_,_,_},{label,Lbl}|_], Acc) ->
+ gb_sets:from_list([Lbl|Acc]).
+
+ulbl({test,_,Fail,_}, Used) ->
+ mark_used(Fail, Used);
+ulbl({select_val,_,Fail,{list,Vls}}, Used) ->
+ mark_used_list(Vls, mark_used(Fail, Used));
+ulbl({select_tuple_arity,_,Fail,{list,Vls}}, Used) ->
+ mark_used_list(Vls, mark_used(Fail, Used));
+ulbl({'try',_,Lbl}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({'catch',_,Lbl}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({jump,Lbl}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({loop_rec,Lbl,_}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({loop_rec_end,Lbl}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({wait,Lbl}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({wait_timeout,Lbl,_To}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bif,_Name,Lbl,_As,_R}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_init2,Lbl,_,_,_,_,_}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_put_integer,Lbl,_Bits,_Unit,_Fl,_Val}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_put_float,Lbl,_Bits,_Unit,_Fl,_Val}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_put_binary,Lbl,_Bits,_Unit,_Fl,_Val}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_final,Lbl,_}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_add,Lbl,_,_}, Used) ->
+ mark_used(Lbl, Used);
+ulbl({bs_bits_to_bytes,Lbl,_,_}, Used) ->
+ mark_used(Lbl, Used);
+ulbl(_, Used) -> Used.
+
+mark_used({f,0}, Used) -> Used;
+mark_used({f,L}, Used) -> gb_sets:add(L, Used);
+mark_used(_, Used) -> Used.
+
+mark_used_list([H|T], Used) ->
+ mark_used_list(T, mark_used(H, Used));
+mark_used_list([], Used) -> Used.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_listing.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_listing.erl
new file mode 100644
index 0000000000..006b8c551a
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_listing.erl
@@ -0,0 +1,117 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_listing.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+-module(beam_listing).
+
+-export([module/2]).
+
+-include("v3_life.hrl").
+
+-import(lists, [foreach/2]).
+
+module(File, Core) when element(1, Core) == c_module ->
+ %% This is a core module.
+ io:put_chars(File, core_pp:format(Core));
+module(File, Kern) when element(1, Kern) == k_mdef ->
+ %% This is a kernel module.
+ io:put_chars(File, v3_kernel_pp:format(Kern));
+ %%io:put_chars(File, io_lib:format("~p~n", [Kern]));
+module(File, {Mod,Exp,Attr,Kern}) ->
+ %% This is output from beam_life (v3).
+ io:fwrite(File, "~w.~n~p.~n~p.~n", [Mod,Exp,Attr]),
+ foreach(fun (F) -> function(File, F) end, Kern);
+module(Stream, {Mod,Exp,Attr,Code,NumLabels}) ->
+ %% This is output from beam_codegen.
+ io:format(Stream, "{module, ~s}. %% version = ~w\n",
+ [Mod, beam_opcodes:format_number()]),
+ io:format(Stream, "\n{exports, ~p}.\n", [Exp]),
+ io:format(Stream, "\n{attributes, ~p}.\n", [Attr]),
+ io:format(Stream, "\n{labels, ~p}.\n", [NumLabels]),
+ foreach(
+ fun ({function,Name,Arity,Entry,Asm}) ->
+ io:format(Stream, "\n\n{function, ~w, ~w, ~w}.\n",
+ [Name, Arity, Entry]),
+ foreach(fun(Op) -> print_op(Stream, Op) end, Asm) end,
+ Code);
+module(Stream, {Mod,Exp,Inter}) ->
+ %% Other kinds of intermediate formats.
+ io:fwrite(Stream, "~w.~n~p.~n", [Mod,Exp]),
+ foreach(fun (F) -> io:format(Stream, "~p.\n", [F]) end, Inter);
+module(Stream, [_|_]=Fs) ->
+ %% Form-based abstract format.
+ foreach(fun (F) -> io:format(Stream, "~p.\n", [F]) end, Fs).
+
+print_op(Stream, Label) when element(1, Label) == label ->
+ io:format(Stream, " ~p.\n", [Label]);
+print_op(Stream, Op) ->
+ io:format(Stream, " ~p.\n", [Op]).
+
+function(File, {function,Name,Arity,Args,Body,Vdb}) ->
+ io:nl(File),
+ io:format(File, "function ~p/~p.\n", [Name,Arity]),
+ io:format(File, " ~p.\n", [Args]),
+ print_vdb(File, Vdb),
+ put(beam_listing_nl, true),
+ foreach(fun(F) -> format(File, F, []) end, Body),
+ nl(File),
+ erase(beam_listing_nl).
+
+format(File, #l{ke=Ke,i=I,vdb=Vdb}, Ind) ->
+ nl(File),
+ ind_format(File, Ind, "~p ", [I]),
+ print_vdb(File, Vdb),
+ nl(File),
+ format(File, Ke, Ind);
+format(File, Tuple, Ind) when is_tuple(Tuple) ->
+ ind_format(File, Ind, "{", []),
+ format_list(File, tuple_to_list(Tuple), [$\s|Ind]),
+ ind_format(File, Ind, "}", []);
+format(File, List, Ind) when is_list(List) ->
+ ind_format(File, Ind, "[", []),
+ format_list(File, List, [$\s|Ind]),
+ ind_format(File, Ind, "]", []);
+format(File, F, Ind) ->
+ ind_format(File, Ind, "~p", [F]).
+
+format_list(File, [F], Ind) ->
+ format(File, F, Ind);
+format_list(File, [F|Fs], Ind) ->
+ format(File, F, Ind),
+ ind_format(File, Ind, ",", []),
+ format_list(File, Fs, Ind);
+format_list(_, [], _) -> ok.
+
+
+print_vdb(File, [{Var,F,E}|Vs]) ->
+ io:format(File, "~p:~p..~p ", [Var,F,E]),
+ print_vdb(File, Vs);
+print_vdb(_, []) -> ok.
+
+ind_format(File, Ind, Format, Args) ->
+ case get(beam_listing_nl) of
+ true ->
+ put(beam_listing_nl, false),
+ io:put_chars(File, Ind);
+ false -> ok
+ end,
+ io:format(File, Format, Args).
+
+nl(File) ->
+ case put(beam_listing_nl, true) of
+ true -> ok;
+ false -> io:nl(File)
+ end.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.erl
new file mode 100644
index 0000000000..a4f5fd34d2
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.erl
@@ -0,0 +1,240 @@
+-module(beam_opcodes).
+%% Warning: Do not edit this file. It was automatically
+%% generated by 'beam_makeops' on Wed Nov 24 17:52:43 2004.
+
+-export([format_number/0]).
+-export([opcode/2,opname/1]).
+
+format_number() -> 0.
+
+opcode(label, 1) -> 1;
+opcode(func_info, 3) -> 2;
+opcode(int_code_end, 0) -> 3;
+opcode(call, 2) -> 4;
+opcode(call_last, 3) -> 5;
+opcode(call_only, 2) -> 6;
+opcode(call_ext, 2) -> 7;
+opcode(call_ext_last, 3) -> 8;
+opcode(bif0, 2) -> 9;
+opcode(bif1, 4) -> 10;
+opcode(bif2, 5) -> 11;
+opcode(allocate, 2) -> 12;
+opcode(allocate_heap, 3) -> 13;
+opcode(allocate_zero, 2) -> 14;
+opcode(allocate_heap_zero, 3) -> 15;
+opcode(test_heap, 2) -> 16;
+opcode(init, 1) -> 17;
+opcode(deallocate, 1) -> 18;
+opcode(return, 0) -> 19;
+opcode(send, 0) -> 20;
+opcode(remove_message, 0) -> 21;
+opcode(timeout, 0) -> 22;
+opcode(loop_rec, 2) -> 23;
+opcode(loop_rec_end, 1) -> 24;
+opcode(wait, 1) -> 25;
+opcode(wait_timeout, 2) -> 26;
+opcode(m_plus, 4) -> 27;
+opcode(m_minus, 4) -> 28;
+opcode(m_times, 4) -> 29;
+opcode(m_div, 4) -> 30;
+opcode(int_div, 4) -> 31;
+opcode(int_rem, 4) -> 32;
+opcode(int_band, 4) -> 33;
+opcode(int_bor, 4) -> 34;
+opcode(int_bxor, 4) -> 35;
+opcode(int_bsl, 4) -> 36;
+opcode(int_bsr, 4) -> 37;
+opcode(int_bnot, 3) -> 38;
+opcode(is_lt, 3) -> 39;
+opcode(is_ge, 3) -> 40;
+opcode(is_eq, 3) -> 41;
+opcode(is_ne, 3) -> 42;
+opcode(is_eq_exact, 3) -> 43;
+opcode(is_ne_exact, 3) -> 44;
+opcode(is_integer, 2) -> 45;
+opcode(is_float, 2) -> 46;
+opcode(is_number, 2) -> 47;
+opcode(is_atom, 2) -> 48;
+opcode(is_pid, 2) -> 49;
+opcode(is_reference, 2) -> 50;
+opcode(is_port, 2) -> 51;
+opcode(is_nil, 2) -> 52;
+opcode(is_binary, 2) -> 53;
+opcode(is_constant, 2) -> 54;
+opcode(is_list, 2) -> 55;
+opcode(is_nonempty_list, 2) -> 56;
+opcode(is_tuple, 2) -> 57;
+opcode(test_arity, 3) -> 58;
+opcode(select_val, 3) -> 59;
+opcode(select_tuple_arity, 3) -> 60;
+opcode(jump, 1) -> 61;
+opcode('catch', 2) -> 62;
+opcode(catch_end, 1) -> 63;
+opcode(move, 2) -> 64;
+opcode(get_list, 3) -> 65;
+opcode(get_tuple_element, 3) -> 66;
+opcode(set_tuple_element, 3) -> 67;
+opcode(put_string, 3) -> 68;
+opcode(put_list, 3) -> 69;
+opcode(put_tuple, 2) -> 70;
+opcode(put, 1) -> 71;
+opcode(badmatch, 1) -> 72;
+opcode(if_end, 0) -> 73;
+opcode(case_end, 1) -> 74;
+opcode(call_fun, 1) -> 75;
+opcode(make_fun, 3) -> 76;
+opcode(is_function, 2) -> 77;
+opcode(call_ext_only, 2) -> 78;
+opcode(bs_start_match, 2) -> 79;
+opcode(bs_get_integer, 5) -> 80;
+opcode(bs_get_float, 5) -> 81;
+opcode(bs_get_binary, 5) -> 82;
+opcode(bs_skip_bits, 4) -> 83;
+opcode(bs_test_tail, 2) -> 84;
+opcode(bs_save, 1) -> 85;
+opcode(bs_restore, 1) -> 86;
+opcode(bs_init, 2) -> 87;
+opcode(bs_final, 2) -> 88;
+opcode(bs_put_integer, 5) -> 89;
+opcode(bs_put_binary, 5) -> 90;
+opcode(bs_put_float, 5) -> 91;
+opcode(bs_put_string, 2) -> 92;
+opcode(bs_need_buf, 1) -> 93;
+opcode(fclearerror, 0) -> 94;
+opcode(fcheckerror, 1) -> 95;
+opcode(fmove, 2) -> 96;
+opcode(fconv, 2) -> 97;
+opcode(fadd, 4) -> 98;
+opcode(fsub, 4) -> 99;
+opcode(fmul, 4) -> 100;
+opcode(fdiv, 4) -> 101;
+opcode(fnegate, 3) -> 102;
+opcode(make_fun2, 1) -> 103;
+opcode('try', 2) -> 104;
+opcode(try_end, 1) -> 105;
+opcode(try_case, 1) -> 106;
+opcode(try_case_end, 1) -> 107;
+opcode(raise, 2) -> 108;
+opcode(bs_init2, 6) -> 109;
+opcode(bs_bits_to_bytes, 3) -> 110;
+opcode(bs_add, 5) -> 111;
+opcode(apply, 1) -> 112;
+opcode(apply_last, 2) -> 113;
+opcode(is_boolean, 2) -> 114;
+opcode(Name, Arity) -> erlang:error(badarg, [Name,Arity]).
+
+opname(1) -> {label,1};
+opname(2) -> {func_info,3};
+opname(3) -> {int_code_end,0};
+opname(4) -> {call,2};
+opname(5) -> {call_last,3};
+opname(6) -> {call_only,2};
+opname(7) -> {call_ext,2};
+opname(8) -> {call_ext_last,3};
+opname(9) -> {bif0,2};
+opname(10) -> {bif1,4};
+opname(11) -> {bif2,5};
+opname(12) -> {allocate,2};
+opname(13) -> {allocate_heap,3};
+opname(14) -> {allocate_zero,2};
+opname(15) -> {allocate_heap_zero,3};
+opname(16) -> {test_heap,2};
+opname(17) -> {init,1};
+opname(18) -> {deallocate,1};
+opname(19) -> {return,0};
+opname(20) -> {send,0};
+opname(21) -> {remove_message,0};
+opname(22) -> {timeout,0};
+opname(23) -> {loop_rec,2};
+opname(24) -> {loop_rec_end,1};
+opname(25) -> {wait,1};
+opname(26) -> {wait_timeout,2};
+opname(27) -> {m_plus,4};
+opname(28) -> {m_minus,4};
+opname(29) -> {m_times,4};
+opname(30) -> {m_div,4};
+opname(31) -> {int_div,4};
+opname(32) -> {int_rem,4};
+opname(33) -> {int_band,4};
+opname(34) -> {int_bor,4};
+opname(35) -> {int_bxor,4};
+opname(36) -> {int_bsl,4};
+opname(37) -> {int_bsr,4};
+opname(38) -> {int_bnot,3};
+opname(39) -> {is_lt,3};
+opname(40) -> {is_ge,3};
+opname(41) -> {is_eq,3};
+opname(42) -> {is_ne,3};
+opname(43) -> {is_eq_exact,3};
+opname(44) -> {is_ne_exact,3};
+opname(45) -> {is_integer,2};
+opname(46) -> {is_float,2};
+opname(47) -> {is_number,2};
+opname(48) -> {is_atom,2};
+opname(49) -> {is_pid,2};
+opname(50) -> {is_reference,2};
+opname(51) -> {is_port,2};
+opname(52) -> {is_nil,2};
+opname(53) -> {is_binary,2};
+opname(54) -> {is_constant,2};
+opname(55) -> {is_list,2};
+opname(56) -> {is_nonempty_list,2};
+opname(57) -> {is_tuple,2};
+opname(58) -> {test_arity,3};
+opname(59) -> {select_val,3};
+opname(60) -> {select_tuple_arity,3};
+opname(61) -> {jump,1};
+opname(62) -> {'catch',2};
+opname(63) -> {catch_end,1};
+opname(64) -> {move,2};
+opname(65) -> {get_list,3};
+opname(66) -> {get_tuple_element,3};
+opname(67) -> {set_tuple_element,3};
+opname(68) -> {put_string,3};
+opname(69) -> {put_list,3};
+opname(70) -> {put_tuple,2};
+opname(71) -> {put,1};
+opname(72) -> {badmatch,1};
+opname(73) -> {if_end,0};
+opname(74) -> {case_end,1};
+opname(75) -> {call_fun,1};
+opname(76) -> {make_fun,3};
+opname(77) -> {is_function,2};
+opname(78) -> {call_ext_only,2};
+opname(79) -> {bs_start_match,2};
+opname(80) -> {bs_get_integer,5};
+opname(81) -> {bs_get_float,5};
+opname(82) -> {bs_get_binary,5};
+opname(83) -> {bs_skip_bits,4};
+opname(84) -> {bs_test_tail,2};
+opname(85) -> {bs_save,1};
+opname(86) -> {bs_restore,1};
+opname(87) -> {bs_init,2};
+opname(88) -> {bs_final,2};
+opname(89) -> {bs_put_integer,5};
+opname(90) -> {bs_put_binary,5};
+opname(91) -> {bs_put_float,5};
+opname(92) -> {bs_put_string,2};
+opname(93) -> {bs_need_buf,1};
+opname(94) -> {fclearerror,0};
+opname(95) -> {fcheckerror,1};
+opname(96) -> {fmove,2};
+opname(97) -> {fconv,2};
+opname(98) -> {fadd,4};
+opname(99) -> {fsub,4};
+opname(100) -> {fmul,4};
+opname(101) -> {fdiv,4};
+opname(102) -> {fnegate,3};
+opname(103) -> {make_fun2,1};
+opname(104) -> {'try',2};
+opname(105) -> {try_end,1};
+opname(106) -> {try_case,1};
+opname(107) -> {try_case_end,1};
+opname(108) -> {raise,2};
+opname(109) -> {bs_init2,6};
+opname(110) -> {bs_bits_to_bytes,3};
+opname(111) -> {bs_add,5};
+opname(112) -> {apply,1};
+opname(113) -> {apply_last,2};
+opname(114) -> {is_boolean,2};
+opname(Number) -> erlang:error(badarg, [Number]).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.hrl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.hrl
new file mode 100644
index 0000000000..1ad0887314
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_opcodes.hrl
@@ -0,0 +1,12 @@
+%% Warning: Do not edit this file. It was automatically
+%% generated by 'beam_makeops' on Wed Nov 24 17:52:43 2004.
+
+-define(tag_u, 0).
+-define(tag_i, 1).
+-define(tag_a, 2).
+-define(tag_x, 3).
+-define(tag_y, 4).
+-define(tag_f, 5).
+-define(tag_h, 6).
+-define(tag_z, 7).
+
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_type.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_type.erl
new file mode 100644
index 0000000000..7d288b249c
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_type.erl
@@ -0,0 +1,551 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_type.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Purpose : Type-based optimisations.
+
+-module(beam_type).
+
+-export([module/2]).
+
+-import(lists, [map/2,foldl/3,reverse/1,reverse/2,filter/2,member/2]).
+
+module({Mod,Exp,Attr,Fs0,Lc}, Opt) ->
+ AllowFloatOpts = not member(no_float_opt, Opt),
+ Fs = map(fun(F) -> function(F, AllowFloatOpts) end, Fs0),
+ {ok,{Mod,Exp,Attr,Fs,Lc}}.
+
+function({function,Name,Arity,CLabel,Asm0}, AllowFloatOpts) ->
+ Asm = opt(Asm0, AllowFloatOpts, [], tdb_new()),
+ {function,Name,Arity,CLabel,Asm}.
+
+%% opt([Instruction], AllowFloatOpts, Accumulator, TypeDb) -> [Instruction']
+%% Keep track of type information; try to simplify.
+
+opt([{block,Body1}|Is], AllowFloatOpts, [{block,Body0}|Acc], Ts0) ->
+ {Body2,Ts} = simplify(Body1, Ts0, AllowFloatOpts),
+ Body = beam_block:merge_blocks(Body0, Body2),
+ opt(Is, AllowFloatOpts, [{block,Body}|Acc], Ts);
+opt([{block,Body0}|Is], AllowFloatOpts, Acc, Ts0) ->
+ {Body,Ts} = simplify(Body0, Ts0, AllowFloatOpts),
+ opt(Is, AllowFloatOpts, [{block,Body}|Acc], Ts);
+opt([I0|Is], AllowFloatOpts, Acc, Ts0) ->
+ case simplify([I0], Ts0, AllowFloatOpts) of
+ {[],Ts} -> opt(Is, AllowFloatOpts, Acc, Ts);
+ {[I],Ts} -> opt(Is, AllowFloatOpts, [I|Acc], Ts)
+ end;
+opt([], _, Acc, _) -> reverse(Acc).
+
+%% simplify([Instruction], TypeDb, AllowFloatOpts) -> {[Instruction'],TypeDb'}
+%%  Simplify instructions using type information (this is
+%%  technically a "strength reduction").
+
+simplify(Is, TypeDb, false) ->
+ simplify(Is, TypeDb, no_float_opt, []);
+simplify(Is, TypeDb, true) ->
+ case are_live_regs_determinable(Is) of
+ false -> simplify(Is, TypeDb, no_float_opt, []);
+ true -> simplify(Is, TypeDb, [], [])
+ end.
+
+simplify([{set,[D],[{integer,Index},Reg],{bif,element,_}}=I0|Is]=Is0, Ts0, Rs0, Acc0) ->
+ I = case max_tuple_size(Reg, Ts0) of
+ Sz when 0 < Index, Index =< Sz ->
+ {set,[D],[Reg],{get_tuple_element,Index-1}};
+ _Other -> I0
+ end,
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is, Ts, Rs, [I|checkerror(Acc)]);
+simplify([{set,[D0],[A],{bif,'-',{f,0}}}=I|Is]=Is0, Ts0, Rs0, Acc0)
+ when Rs0 =/= no_float_opt ->
+ case tdb_find(A, Ts0) of
+ float ->
+ {Rs1,Acc1} = load_reg(A, Ts0, Rs0, Acc0),
+ {D,Rs} = find_dest(D0, Rs1),
+ Areg = fetch_reg(A, Rs),
+ Acc = [{set,[D],[Areg],{bif,fnegate,{f,0}}}|clearerror(Acc1)],
+ Ts = tdb_update([{D0,float}], Ts0),
+ simplify(Is, Ts, Rs, Acc);
+ _Other ->
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is, Ts, Rs, [I|checkerror(Acc)])
+ end;
+simplify([{set,[_],[_],{bif,_,{f,0}}}=I|Is]=Is0, Ts0, Rs0, Acc0) ->
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is, Ts, Rs, [I|checkerror(Acc)]);
+simplify([{set,[D0],[A,B],{bif,Op0,{f,0}}}=I|Is]=Is0, Ts0, Rs0, Acc0)
+ when Rs0 =/= no_float_opt ->
+ case float_op(Op0, A, B, Ts0) of
+ no ->
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is, Ts, Rs, [I|checkerror(Acc)]);
+ {yes,Op} ->
+ {Rs1,Acc1} = load_reg(A, Ts0, Rs0, Acc0),
+ {Rs2,Acc2} = load_reg(B, Ts0, Rs1, Acc1),
+ {D,Rs} = find_dest(D0, Rs2),
+ Areg = fetch_reg(A, Rs),
+ Breg = fetch_reg(B, Rs),
+ Acc = [{set,[D],[Areg,Breg],{bif,Op,{f,0}}}|clearerror(Acc2)],
+ Ts = tdb_update([{D0,float}], Ts0),
+ simplify(Is, Ts, Rs, Acc)
+ end;
+simplify([{set,[D],[TupleReg],{get_tuple_element,0}}=I|Is0], Ts0, Rs0, Acc0) ->
+ case tdb_find(TupleReg, Ts0) of
+ {tuple,_,[Contents]} ->
+ Ts = tdb_update([{D,Contents}], Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is0, Ts, Rs, [{set,[D],[Contents],move}|Acc]);
+ _ ->
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is0, Ts, Rs, [I|checkerror(Acc)])
+ end;
+simplify([{set,_,_,{'catch',_}}=I|Is]=Is0, _Ts, Rs0, Acc0) ->
+ Acc = flush_all(Rs0, Is0, Acc0),
+ simplify(Is, tdb_new(), Rs0, [I|Acc]);
+simplify([{test,is_tuple,_,[R]}=I|Is], Ts, Rs, Acc) ->
+ case tdb_find(R, Ts) of
+ {tuple,_,_} -> simplify(Is, Ts, Rs, Acc);
+ _ ->
+ simplify(Is, Ts, Rs, [I|Acc])
+ end;
+simplify([{test,test_arity,_,[R,Arity]}=I|Is], Ts0, Rs, Acc) ->
+ case tdb_find(R, Ts0) of
+ {tuple,Arity,_} ->
+ simplify(Is, Ts0, Rs, Acc);
+ _Other ->
+ Ts = update(I, Ts0),
+ simplify(Is, Ts, Rs, [I|Acc])
+ end;
+simplify([{test,is_eq_exact,Fail,[R,{atom,_}=Atom]}=I|Is0], Ts0, Rs0, Acc0) ->
+ Acc1 = case tdb_find(R, Ts0) of
+ {atom,_}=Atom -> Acc0;
+ {atom,_} -> [{jump,Fail}|Acc0];
+ _ -> [I|Acc0]
+ end,
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc1),
+ simplify(Is0, Ts, Rs, Acc);
+simplify([I|Is]=Is0, Ts0, Rs0, Acc0) ->
+ Ts = update(I, Ts0),
+ {Rs,Acc} = flush(Rs0, Is0, Acc0),
+ simplify(Is, Ts, Rs, [I|Acc]);
+simplify([], Ts, Rs, Acc) ->
+ Is0 = reverse(flush_all(Rs, [], Acc)),
+ Is1 = opt_fmoves(Is0, []),
+ Is = add_ftest_heap(Is1),
+ {Is,Ts}.
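+
+%% A minimal illustrative sketch (this helper and its register choices
+%% are hypothetical and not part of the original module): given a type
+%% database recording {x,1} as a tuple with at least three elements,
+%% simplify/3 strength-reduces an element/2 call into a cheap
+%% get_tuple_element instruction, as described above.
+simplify_element_example() ->
+    Ts = tdb_update([{{x,1},{tuple,3,[]}}], tdb_new()),
+    I = {set,[{x,0}],[{integer,2},{x,1}],{bif,element,{f,0}}},
+    {[{set,[{x,0}],[{x,1}],{get_tuple_element,1}}],_} =
+        simplify([I], Ts, false),
+    ok.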
+
+opt_fmoves([{set,[{x,_}=R],[{fr,_}]=Src,fmove}=I1,
+ {set,[{y,_}]=Dst,[{x,_}=R],move}=I2|Is], Acc) ->
+ case beam_block:is_killed(R, Is) of
+ false -> opt_fmoves(Is, [I2,I1|Acc]);
+ true -> opt_fmoves(Is, [{set,Dst,Src,fmove}|Acc])
+ end;
+opt_fmoves([I|Is], Acc) ->
+ opt_fmoves(Is, [I|Acc]);
+opt_fmoves([], Acc) -> reverse(Acc).
+
+clearerror(Is) ->
+ clearerror(Is, Is).
+
+clearerror([{set,[],[],fclearerror}|_], OrigIs) -> OrigIs;
+clearerror([{set,[],[],fcheckerror}|_], OrigIs) -> [{set,[],[],fclearerror}|OrigIs];
+clearerror([_|Is], OrigIs) -> clearerror(Is, OrigIs);
+clearerror([], OrigIs) -> [{set,[],[],fclearerror}|OrigIs].
+
+%% update(Instruction, TypeDb) -> NewTypeDb
+%% Update the type database to account for executing an instruction.
+%%
+%% First the cases for instructions inside basic blocks.
+update({set,[D],[S],move}, Ts0) ->
+ Ops = case tdb_find(S, Ts0) of
+ error -> [{D,kill}];
+ Info -> [{D,Info}]
+ end,
+ tdb_update(Ops, Ts0);
+update({set,[D],[{integer,I},Reg],{bif,element,_}}, Ts0) ->
+ tdb_update([{Reg,{tuple,I,[]}},{D,kill}], Ts0);
+update({set,[D],[_Index,Reg],{bif,element,_}}, Ts0) ->
+ tdb_update([{Reg,{tuple,0,[]}},{D,kill}], Ts0);
+update({set,[D],[S],{get_tuple_element,0}}, Ts) ->
+ tdb_update([{D,{tuple_element,S,0}}], Ts);
+update({set,[D],[S],{bif,float,{f,0}}}, Ts0) ->
+ %% Make sure we reject non-numeric literal argument.
+ case possibly_numeric(S) of
+ true -> tdb_update([{D,float}], Ts0);
+ false -> Ts0
+ end;
+update({set,[D],[S1,S2],{bif,'/',{f,0}}}, Ts0) ->
+ %% Make sure we reject non-numeric literals.
+ case possibly_numeric(S1) andalso possibly_numeric(S2) of
+ true -> tdb_update([{D,float}], Ts0);
+ false -> Ts0
+ end;
+update({set,[D],[S1,S2],{bif,Op,{f,0}}}, Ts0) ->
+ case arith_op(Op) of
+ no ->
+ tdb_update([{D,kill}], Ts0);
+ {yes,_} ->
+ case {tdb_find(S1, Ts0),tdb_find(S2, Ts0)} of
+ {float,_} -> tdb_update([{D,float}], Ts0);
+ {_,float} -> tdb_update([{D,float}], Ts0);
+ {_,_} -> tdb_update([{D,kill}], Ts0)
+ end
+ end;
+update({set,[],_Src,_Op}, Ts0) -> Ts0;
+update({set,[D],_Src,_Op}, Ts0) ->
+ tdb_update([{D,kill}], Ts0);
+update({set,[D1,D2],_Src,_Op}, Ts0) ->
+ tdb_update([{D1,kill},{D2,kill}], Ts0);
+update({allocate,_,_}, Ts) -> Ts;
+update({init,D}, Ts) ->
+ tdb_update([{D,kill}], Ts);
+update({kill,D}, Ts) ->
+ tdb_update([{D,kill}], Ts);
+update({'%live',_}, Ts) -> Ts;
+
+%% Instructions outside of blocks.
+update({test,is_float,_Fail,[Src]}, Ts0) ->
+ tdb_update([{Src,float}], Ts0);
+update({test,test_arity,_Fail,[Src,Arity]}, Ts0) ->
+ tdb_update([{Src,{tuple,Arity,[]}}], Ts0);
+update({test,is_eq_exact,_,[Reg,{atom,_}=Atom]}, Ts) ->
+ case tdb_find(Reg, Ts) of
+ error ->
+ Ts;
+ {tuple_element,TupleReg,0} ->
+ tdb_update([{TupleReg,{tuple,1,[Atom]}}], Ts);
+ _ ->
+ Ts
+ end;
+update({test,_Test,_Fail,_Other}, Ts) -> Ts;
+update({call_ext,1,{extfunc,math,Math,1}}, Ts) ->
+ case is_math_bif(Math, 1) of
+ true -> tdb_update([{{x,0},float}], Ts);
+ false -> tdb_kill_xregs(Ts)
+ end;
+update({call_ext,2,{extfunc,math,Math,2}}, Ts) ->
+ case is_math_bif(Math, 2) of
+ true -> tdb_update([{{x,0},float}], Ts);
+ false -> tdb_kill_xregs(Ts)
+ end;
+update({call_ext,3,{extfunc,erlang,setelement,3}}, Ts0) ->
+ Op = case tdb_find({x,1}, Ts0) of
+ error -> kill;
+ Info -> Info
+ end,
+ Ts1 = tdb_kill_xregs(Ts0),
+ tdb_update([{{x,0},Op}], Ts1);
+update({call,_Arity,_Func}, Ts) -> tdb_kill_xregs(Ts);
+update({call_ext,_Arity,_Func}, Ts) -> tdb_kill_xregs(Ts);
+update({make_fun2,_,_,_,_}, Ts) -> tdb_kill_xregs(Ts);
+
+%% The instruction is unknown. Kill all information.
+update(_I, _Ts) -> tdb_new().
+
+is_math_bif(cos, 1) -> true;
+is_math_bif(cosh, 1) -> true;
+is_math_bif(sin, 1) -> true;
+is_math_bif(sinh, 1) -> true;
+is_math_bif(tan, 1) -> true;
+is_math_bif(tanh, 1) -> true;
+is_math_bif(acos, 1) -> true;
+is_math_bif(acosh, 1) -> true;
+is_math_bif(asin, 1) -> true;
+is_math_bif(asinh, 1) -> true;
+is_math_bif(atan, 1) -> true;
+is_math_bif(atanh, 1) -> true;
+is_math_bif(erf, 1) -> true;
+is_math_bif(erfc, 1) -> true;
+is_math_bif(exp, 1) -> true;
+is_math_bif(log, 1) -> true;
+is_math_bif(log10, 1) -> true;
+is_math_bif(sqrt, 1) -> true;
+is_math_bif(atan2, 2) -> true;
+is_math_bif(pow, 2) -> true;
+is_math_bif(pi, 0) -> true;
+is_math_bif(_, _) -> false.
+
+%% Reject non-numeric literals.
+possibly_numeric({x,_}) -> true;
+possibly_numeric({y,_}) -> true;
+possibly_numeric({integer,_}) -> true;
+possibly_numeric({float,_}) -> true;
+possibly_numeric(_) -> false.
+
+max_tuple_size(Reg, Ts) ->
+ case tdb_find(Reg, Ts) of
+ {tuple,Sz,_} -> Sz;
+ _Other -> 0
+ end.
+
+float_op('/', A, B, _) ->
+ case possibly_numeric(A) andalso possibly_numeric(B) of
+ true -> {yes,fdiv};
+ false -> no
+ end;
+float_op(Op, {float,_}, B, _) ->
+ case possibly_numeric(B) of
+ true -> arith_op(Op);
+ false -> no
+ end;
+float_op(Op, A, {float,_}, _) ->
+ case possibly_numeric(A) of
+ true -> arith_op(Op);
+ false -> no
+ end;
+float_op(Op, A, B, Ts) ->
+ case {tdb_find(A, Ts),tdb_find(B, Ts)} of
+ {float,_} -> arith_op(Op);
+ {_,float} -> arith_op(Op);
+ {_,_} -> no
+ end.
+
+find_dest(V, Rs0) ->
+ case find_reg(V, Rs0) of
+ {ok,FR} ->
+ {FR,mark(V, Rs0, dirty)};
+ error ->
+ Rs = put_reg(V, Rs0, dirty),
+ {ok,FR} = find_reg(V, Rs),
+ {FR,Rs}
+ end.
+
+load_reg({float,_}=F, _, Rs0, Is0) ->
+ Rs = put_reg(F, Rs0, clean),
+ {ok,FR} = find_reg(F, Rs),
+ Is = [{set,[FR],[F],fmove}|Is0],
+ {Rs,Is};
+load_reg(V, Ts, Rs0, Is0) ->
+ case find_reg(V, Rs0) of
+ {ok,_FR} -> {Rs0,Is0};
+ error ->
+ Rs = put_reg(V, Rs0, clean),
+ {ok,FR} = find_reg(V, Rs),
+ Op = case tdb_find(V, Ts) of
+ float -> fmove;
+ _ -> fconv
+ end,
+ Is = [{set,[FR],[V],Op}|Is0],
+ {Rs,Is}
+ end.
+
+arith_op('+') -> {yes,fadd};
+arith_op('-') -> {yes,fsub};
+arith_op('*') -> {yes,fmul};
+arith_op('/') -> {yes,fdiv};
+arith_op(_) -> no.
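+
+%% For illustration only (a hypothetical helper added here, not present
+%% in the original source): float_op/4 selects a float instruction only
+%% when the operation is '/' with possibly-numeric operands, or when at
+%% least one operand is known to be a float.
+float_op_example() ->
+    {yes,fdiv} = float_op('/', {x,0}, {y,1}, tdb_new()),
+    {yes,fadd} = float_op('+', {float,1.0}, {x,0}, tdb_new()),
+    no = float_op('+', {x,0}, {y,1}, tdb_new()),
+    ok.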
+
+flush(no_float_opt, _, Acc) -> {no_float_opt,Acc};
+flush(Rs, [{set,[_],[],{put_tuple,_}}|_]=Is0, Acc0) ->
+ Acc = flush_all(Rs, Is0, Acc0),
+ {[],Acc};
+flush(Rs0, [{set,Ds,Ss,_Op}|_], Acc0) ->
+ Save = gb_sets:from_list(Ss),
+ Acc = save_regs(Rs0, Save, Acc0),
+ Rs1 = foldl(fun(S, A) -> mark(S, A, clean) end, Rs0, Ss),
+ Kill = gb_sets:from_list(Ds),
+ Rs = kill_regs(Rs1, Kill),
+ {Rs,Acc};
+flush(Rs0, Is, Acc0) ->
+ Acc = flush_all(Rs0, Is, Acc0),
+ {[],Acc}.
+
+flush_all(no_float_opt, _, Acc) -> Acc;
+flush_all([{_,{float,_},_}|Rs], Is, Acc) ->
+ flush_all(Rs, Is, Acc);
+flush_all([{I,V,dirty}|Rs], Is, Acc0) ->
+ Acc = checkerror(Acc0),
+ case beam_block:is_killed(V, Is) of
+ true -> flush_all(Rs, Is, Acc);
+ false -> flush_all(Rs, Is, [{set,[V],[{fr,I}],fmove}|Acc])
+ end;
+flush_all([{_,_,clean}|Rs], Is, Acc) -> flush_all(Rs, Is, Acc);
+flush_all([free|Rs], Is, Acc) -> flush_all(Rs, Is, Acc);
+flush_all([], _, Acc) -> Acc.
+
+save_regs(Rs, Save, Acc) ->
+ foldl(fun(R, A) -> save_reg(R, Save, A) end, Acc, Rs).
+
+save_reg({I,V,dirty}, Save, Acc) ->
+ case gb_sets:is_member(V, Save) of
+ true -> [{set,[V],[{fr,I}],fmove}|checkerror(Acc)];
+ false -> Acc
+ end;
+save_reg(_, _, Acc) -> Acc.
+
+kill_regs(Rs, Kill) ->
+ map(fun(R) -> kill_reg(R, Kill) end, Rs).
+
+kill_reg({_,V,_}=R, Kill) ->
+ case gb_sets:is_member(V, Kill) of
+ true -> free;
+ false -> R
+ end;
+kill_reg(R, _) -> R.
+
+mark(V, [{I,V,_}|Rs], Mark) -> [{I,V,Mark}|Rs];
+mark(V, [R|Rs], Mark) -> [R|mark(V, Rs, Mark)];
+mark(_, [], _) -> [].
+
+fetch_reg(V, [{I,V,_}|_]) -> {fr,I};
+fetch_reg(V, [_|SRs]) -> fetch_reg(V, SRs).
+
+find_reg(V, [{I,V,_}|_]) -> {ok,{fr,I}};
+find_reg(V, [_|SRs]) -> find_reg(V, SRs);
+find_reg(_, []) -> error.
+
+put_reg(V, Rs, Dirty) -> put_reg_1(V, Rs, Dirty, 0).
+
+put_reg_1(V, [free|Rs], Dirty, I) -> [{I,V,Dirty}|Rs];
+put_reg_1(V, [R|Rs], Dirty, I) -> [R|put_reg_1(V, Rs, Dirty, I+1)];
+put_reg_1(V, [], Dirty, I) -> [{I,V,Dirty}].
+
+checkerror(Is) ->
+ checkerror_1(Is, Is).
+
+checkerror_1([{set,[],[],fcheckerror}|_], OrigIs) -> OrigIs;
+checkerror_1([{set,[],[],fclearerror}|_], OrigIs) -> OrigIs;
+checkerror_1([{set,_,_,{bif,fadd,_}}|_], OrigIs) -> checkerror_2(OrigIs);
+checkerror_1([{set,_,_,{bif,fsub,_}}|_], OrigIs) -> checkerror_2(OrigIs);
+checkerror_1([{set,_,_,{bif,fmul,_}}|_], OrigIs) -> checkerror_2(OrigIs);
+checkerror_1([{set,_,_,{bif,fdiv,_}}|_], OrigIs) -> checkerror_2(OrigIs);
+checkerror_1([{set,_,_,{bif,fnegate,_}}|_], OrigIs) -> checkerror_2(OrigIs);
+checkerror_1([_|Is], OrigIs) -> checkerror_1(Is, OrigIs);
+checkerror_1([], OrigIs) -> OrigIs.
+
+checkerror_2(OrigIs) -> [{set,[],[],fcheckerror}|OrigIs].
+
+add_ftest_heap(Is) ->
+ add_ftest_heap_1(reverse(Is), 0, []).
+
+add_ftest_heap_1([{set,_,[{fr,_}],fmove}=I|Is], Floats, Acc) ->
+ add_ftest_heap_1(Is, Floats+1, [I|Acc]);
+add_ftest_heap_1([{allocate,_,_}=I|Is], 0, Acc) ->
+ reverse(Is, [I|Acc]);
+add_ftest_heap_1([{allocate,Regs,{Z,Stk,Heap,Inits}}|Is], Floats, Acc) ->
+ reverse(Is, [{allocate,Regs,{Z,Stk,Heap,Floats,Inits}}|Acc]);
+add_ftest_heap_1([I|Is], Floats, Acc) ->
+ add_ftest_heap_1(Is, Floats, [I|Acc]);
+add_ftest_heap_1([], 0, Acc) ->
+ Acc;
+add_ftest_heap_1([], Floats, Is) ->
+ Regs = beam_block:live_at_entry(Is),
+ [{allocate,Regs,{nozero,nostack,0,Floats,[]}}|Is].
+
+are_live_regs_determinable([{allocate,_,_}|_]) -> true;
+are_live_regs_determinable([{'%live',_}|_]) -> true;
+are_live_regs_determinable([_|Is]) -> are_live_regs_determinable(Is);
+are_live_regs_determinable([]) -> false.
+
+
+%%% Routines for maintaining a type database. The type database
+%%% associates type information with registers.
+%%%
+%%% {tuple,Size,First} means that the corresponding register contains a
+%%% tuple with *at least* Size elements. A tuple with unknown
+%%% size is represented as {tuple,0,[]}. First is either [] (meaning that
+%%% the tuple's first element is unknown) or [FirstElement] (the contents
+%%% of the first element).
+%%%
+%%% 'float' means that the register contains a float.
+
+%% tdb_new() -> EmptyDataBase
+%% Creates a new, empty type database.
+
+tdb_new() -> [].
+
+%% tdb_find(Register, Db) -> Information|error
+%%  Returns the type information, or the atom 'error' if no type
+%%  information is available for Register.
+
+tdb_find(Key, [{K,_}|_]) when Key < K -> error;
+tdb_find(Key, [{Key,Info}|_]) -> Info;
+tdb_find(Key, [_|Db]) -> tdb_find(Key, Db);
+tdb_find(_, []) -> error.
+
+%% tdb_update([UpdateOp], Db) -> NewDb
+%% UpdateOp = {Register,kill}|{Register,NewInfo}
+%% Updates a type database. If a 'kill' operation is given, the type
+%% information for that register will be removed from the database.
+%% A kill operation takes precedence over other operations for the same
+%% register (i.e. [{{x,0},kill},{{x,0},{tuple,5,[]}}] means that the
+%% existing type information, if any, will be discarded and the
+%% '{tuple,5,[]}' information ignored).
+%%
+%% If NewInfo information is given and there already is information about
+%% the register, the old and new type information will be merged.
+%% For instance, {tuple,5,[]} and {tuple,10,[]} will be merged to produce
+%% {tuple,10,[]}.
+
+tdb_update(Uis0, Ts0) ->
+ Uis1 = filter(fun ({{x,_},_Op}) -> true;
+ ({{y,_},_Op}) -> true;
+ (_) -> false
+ end, Uis0),
+ tdb_update1(lists:sort(Uis1), Ts0).
+
+tdb_update1([{Key,kill}|Ops], [{K,_Old}|_]=Db) when Key < K ->
+ tdb_update1(remove_key(Key, Ops), Db);
+tdb_update1([{Key,_New}=New|Ops], [{K,_Old}|_]=Db) when Key < K ->
+ [New|tdb_update1(Ops, Db)];
+tdb_update1([{Key,kill}|Ops], [{Key,_}|Db]) ->
+ tdb_update1(remove_key(Key, Ops), Db);
+tdb_update1([{Key,NewInfo}|Ops], [{Key,OldInfo}|Db]) ->
+ [{Key,merge_type_info(NewInfo, OldInfo)}|tdb_update1(Ops, Db)];
+tdb_update1([{_,_}|_]=Ops, [Old|Db]) ->
+ [Old|tdb_update1(Ops, Db)];
+tdb_update1([{Key,kill}|Ops], []) ->
+ tdb_update1(remove_key(Key, Ops), []);
+tdb_update1([{_,_}=New|Ops], []) ->
+ [New|tdb_update1(Ops, [])];
+tdb_update1([], Db) -> Db.
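+
+%% A sketch of the update semantics documented above (this helper is an
+%% illustrative addition, not part of the original module): tuple sizes
+%% are merged upwards, and a 'kill' op discards both the existing type
+%% information and any other op given for the same register.
+tdb_update_example() ->
+    Db1 = tdb_update([{{x,0},{tuple,5,[]}}], tdb_new()),
+    Db2 = tdb_update([{{x,0},{tuple,10,[]}}], Db1),
+    {tuple,10,[]} = tdb_find({x,0}, Db2),
+    Db3 = tdb_update([{{x,0},kill},{{x,0},{tuple,5,[]}}], Db2),
+    error = tdb_find({x,0}, Db3),
+    ok.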
+
+%% tdb_kill_xregs(Db) -> NewDb
+%% Kill all information about x registers. Also kill all tuple_element
+%% dependencies from y registers to x registers.
+
+tdb_kill_xregs([{{x,_},_Type}|Db]) -> tdb_kill_xregs(Db);
+tdb_kill_xregs([{{y,_},{tuple_element,{x,_},_}}|Db]) -> tdb_kill_xregs(Db);
+tdb_kill_xregs([Any|Db]) -> [Any|tdb_kill_xregs(Db)];
+tdb_kill_xregs([]) -> [].
+
+remove_key(Key, [{Key,_Op}|Ops]) -> remove_key(Key, Ops);
+remove_key(_, Ops) -> Ops.
+
+merge_type_info(I, I) -> I;
+merge_type_info({tuple,Sz1,Same}, {tuple,Sz2,Same}=Max) when Sz1 < Sz2 ->
+ Max;
+merge_type_info({tuple,Sz1,Same}=Max, {tuple,Sz2,Same}) when Sz1 > Sz2 ->
+ Max;
+merge_type_info({tuple,Sz1,[]}, {tuple,Sz2,First}) ->
+ merge_type_info({tuple,Sz1,First}, {tuple,Sz2,First});
+merge_type_info({tuple,Sz1,First}, {tuple,Sz2,_}) ->
+ merge_type_info({tuple,Sz1,First}, {tuple,Sz2,First});
+merge_type_info(NewType, _) ->
+ verify_type(NewType),
+ NewType.
+
+verify_type({tuple,Sz,[]}) when is_integer(Sz) -> ok;
+verify_type({tuple,Sz,[_]}) when is_integer(Sz) -> ok;
+verify_type({tuple_element,_,_}) -> ok;
+verify_type(float) -> ok;
+verify_type({atom,_}) -> ok.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_validator.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_validator.erl
new file mode 100644
index 0000000000..a01be447b0
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/beam_validator.erl
@@ -0,0 +1,1022 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: beam_validator.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+
+-module(beam_validator).
+
+-export([file/1,files/1]).
+
+%% Interface for compiler.
+-export([module/2,format_error/1]).
+
+-import(lists, [reverse/1,foldl/3]).
+
+-define(MAXREG, 1024).
+
+-define(DEBUG, 1).
+-undef(DEBUG).
+-ifdef(DEBUG).
+-define(DBG_FORMAT(F, D), (io:format((F), (D)))).
+-else.
+-define(DBG_FORMAT(F, D), ok).
+-endif.
+
+%%%
+%%% API functions.
+%%%
+
+files([F|Fs]) ->
+ ?DBG_FORMAT("# Verifying: ~p~n", [F]),
+ case file(F) of
+ ok -> ok;
+ {error,Es} ->
+ io:format("~p:~n~s~n", [F,format_error(Es)])
+ end,
+ files(Fs);
+files([]) -> ok.
+
+file(Name) when is_list(Name) ->
+ case case filename:extension(Name) of
+ ".S" -> s_file(Name);
+ ".beam" -> beam_file(Name)
+ end of
+ [] -> ok;
+ Es -> {error,Es}
+ end.
+
+%% To be called by the compiler.
+module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
+ when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) ->
+ case validate(Fs) of
+ [] -> {ok,Code};
+ Es0 ->
+ Es = [{?MODULE,E} || E <- Es0],
+ {error,[{atom_to_list(Mod),Es}]}
+ end.
+
+format_error([]) -> [];
+format_error([{{M,F,A},{I,Off,Desc}}|Es]) ->
+ [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n",
+ [M,F,A,Off,I,Desc])|format_error(Es)];
+format_error({{_M,F,A},{I,Off,Desc}}) ->
+ io_lib:format(
+ "function ~p/~p+~p:~n"
+ " Internal consistency check failed - please report this bug.~n"
+ " Instruction: ~p~n"
+ " Error: ~p:~n", [F,A,Off,I,Desc]).
+
+%%%
+%%% Local functions follow.
+%%%
+
+s_file(Name) ->
+ {ok,Is} = file:consult(Name),
+ Fs = find_functions(Is),
+ validate(Fs).
+
+find_functions(Fs) ->
+ find_functions_1(Fs, none, [], []).
+
+find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) ->
+ Acc = add_func(Func, FuncAcc, Acc0),
+ find_functions_1(Is, {Name,Arity,Entry}, [], Acc);
+find_functions_1([I|Is], Func, FuncAcc, Acc) ->
+ find_functions_1(Is, Func, [I|FuncAcc], Acc);
+find_functions_1([], Func, FuncAcc, Acc) ->
+ reverse(add_func(Func, FuncAcc, Acc)).
+
+add_func(none, _, Acc) -> Acc;
+add_func({Name,Arity,Entry}, Is, Acc) ->
+ [{function,Name,Arity,Entry,reverse(Is)}|Acc].
+
+beam_file(Name) ->
+ try beam_disasm:file(Name) of
+ {error,beam_lib,Reason} -> [{beam_lib,Reason}];
+ {beam_file,L} ->
+ {value,{code,Code0}} = lists:keysearch(code, 1, L),
+ Code = beam_file_1(Code0, []),
+ validate(Code)
+ catch _:_ -> [disassembly_failed]
+ end.
+
+beam_file_1([F0|Fs], Acc) ->
+ F = conv_func(F0),
+ beam_file_1(Fs, [F|Acc]);
+beam_file_1([], Acc) -> reverse(Acc).
+
+%% Convert from the disassembly format to the internal format
+%% used by the compiler (as passed to the assembler).
+
+conv_func(Is) ->
+ conv_func_1(labels(Is)).
+
+conv_func_1({Ls,[{func_info,[{atom,M},{atom,F},Ar]},
+ {label,Entry}=Le|Is]}) ->
+    %% The entry label may not be correct here.
+ {function,F,Ar,Entry,
+ [{label,L}||L<-Ls]++[{func_info,{atom,M},{atom,F},Ar},Le|Is]}.
+
+%%%
+%%% The validator follows.
+%%%
+%%% The purpose of the validator is to find errors in the generated code
+%%% that may cause the emulator to crash or behave strangely.
+%%% We don't care about type errors in the user's code that will
+%%% cause a proper exception at run-time.
+%%%
+
+%%% Things currently not checked. XXX
+%%%
+%%% - That floating point registers are initialized before used.
+%%% - That fclearerror and fcheckerror are used properly.
+%%% - Heap allocation for floating point numbers.
+%%% - Heap allocation for binaries.
+%%% - That a catchtag or trytag is not overwritten by the wrong
+%%% type of instruction (such as move/2).
+%%% - Make sure that all catchtags and trytags have been removed
+%%% from the stack at return/tail call.
+%%% - Verify get_list instructions.
+%%%
+
+%% validate([Function]) -> [] | [Error]
+%% A list of functions with their code. The code is in the same
+%% format as used in the compiler and in .S files.
+validate([]) -> [];
+validate([{function,Name,Ar,Entry,Code}|Fs]) ->
+ try validate_1(Code, Name, Ar, Entry) of
+ _ -> validate(Fs)
+ catch
+ Error ->
+ [Error|validate(Fs)];
+ error:Error ->
+ [validate_error(Error, Name, Ar)|validate(Fs)]
+ end.
+
+-ifdef(DEBUG).
+validate_error(Error, Name, Ar) ->
+ exit(validate_error_1(Error, Name, Ar)).
+-else.
+validate_error(Error, Name, Ar) ->
+ validate_error_1(Error, Name, Ar).
+-endif.
+validate_error_1(Error, Name, Ar) ->
+ {{'_',Name,Ar},
+ {internal_error,'_',{Error,erlang:get_stacktrace()}}}.
+
+-record(st, %Emulation state
+ {x=init_regs(0, term), %x register info.
+ y=init_regs(0, initialized), %y register info.
+ numy=none, %Number of y registers.
+ h=0, %Available heap size.
+ ct=[] %List of hot catch/try labels
+ }).
+
+-record(vst, %Validator state
+ {current=none, %Current state
+ branched=gb_trees:empty() %States at jumps
+ }).
+
+-ifdef(DEBUG).
+print_st(#st{x=Xs,y=Ys,numy=NumY,h=H,ct=Ct}) ->
+ io:format(" #st{x=~p~n"
+ " y=~p~n"
+ " numy=~p,h=~p,ct=~w~n",
+ [gb_trees:to_list(Xs),gb_trees:to_list(Ys),NumY,H,Ct]).
+-endif.
+
+validate_1(Is, Name, Arity, Entry) ->
+ validate_2(labels(Is), Name, Arity, Entry).
+
+validate_2({Ls1,[{func_info,{atom,Mod},{atom,Name},Arity}=_F|Is]},
+ Name, Arity, Entry) ->
+ lists:foreach(fun (_L) -> ?DBG_FORMAT(" ~p.~n", [_L]) end, Ls1),
+ ?DBG_FORMAT(" ~p.~n", [_F]),
+ validate_3(labels(Is), Name, Arity, Entry, Mod, Ls1);
+validate_2({Ls1,Is}, Name, Arity, _Entry) ->
+ error({{'_',Name,Arity},{first(Is),length(Ls1),illegal_instruction}}).
+
+validate_3({Ls2,Is}, Name, Arity, Entry, Mod, Ls1) ->
+ lists:foreach(fun (_L) -> ?DBG_FORMAT(" ~p.~n", [_L]) end, Ls2),
+ Offset = 1 + length(Ls2),
+ case lists:member(Entry, Ls2) of
+ true ->
+ St = init_state(Arity),
+ Vst = #vst{current=St,
+ branched=gb_trees_from_list([{L,St} || L <- Ls1])},
+ valfun(Is, {Mod,Name,Arity}, Offset, Vst);
+ false ->
+ error({{Mod,Name,Arity},{first(Is),Offset,no_entry_label}})
+ end.
+
+first([X|_]) -> X;
+first([]) -> [].
+
+labels(Is) ->
+ labels_1(Is, []).
+
+labels_1([{label,L}|Is], R) ->
+ labels_1(Is, [L|R]);
+labels_1(Is, R) ->
+ {lists:reverse(R),Is}.
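+
+%% A small worked example (hypothetical helper, not taken from the
+%% original source): labels/1 splits the leading label instructions off
+%% from the rest of a function's code.
+labels_example() ->
+    {[1,2],[{func_info,{atom,m},{atom,f},0}]} =
+        labels([{label,1},{label,2},{func_info,{atom,m},{atom,f},0}]),
+    ok.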
+
+init_state(Arity) ->
+ Xs = init_regs(Arity, term),
+ Ys = init_regs(0, initialized),
+ #st{x=Xs,y=Ys,numy=none,h=0,ct=[]}.
+
+init_regs(0, _) ->
+ gb_trees:empty();
+init_regs(N, Type) ->
+ gb_trees_from_list([{R,Type} || R <- lists:seq(0, N-1)]).
+
+valfun([], _MFA, _Offset, Vst) -> Vst;
+valfun([I|Is], MFA, Offset, Vst) ->
+ ?DBG_FORMAT(" ~p.\n", [I]),
+ valfun(Is, MFA, Offset+1,
+ try valfun_1(I, Vst)
+ catch Error ->
+ error({MFA,{I,Offset,Error}})
+ end).
+
+%% Instructions that are allowed in dead code or when failing,
+%% that is while the state is undecided in some way.
+valfun_1({label,Lbl}, #vst{current=St0,branched=B}=Vst) ->
+ St = merge_states(Lbl, St0, B),
+ Vst#vst{current=St,branched=gb_trees:enter(Lbl, St, B)};
+valfun_1(_I, #vst{current=none}=Vst) ->
+ %% Ignore instructions after erlang:error/1,2, which
+ %% the original R10B compiler thought would return.
+ ?DBG_FORMAT("Ignoring ~p\n", [_I]),
+ Vst;
+valfun_1({badmatch,Src}, Vst) ->
+ assert_term(Src, Vst),
+ kill_state(Vst);
+valfun_1({case_end,Src}, Vst) ->
+ assert_term(Src, Vst),
+ kill_state(Vst);
+valfun_1(if_end, Vst) ->
+ kill_state(Vst);
+valfun_1({try_case_end,Src}, Vst) ->
+ assert_term(Src, Vst),
+ kill_state(Vst);
+%% Instructions that can not cause exceptions
+valfun_1({move,Src,Dst}, Vst) ->
+ Type = get_term_type(Src, Vst),
+ set_type_reg(Type, Dst, Vst);
+valfun_1({fmove,Src,{fr,_}}, Vst) ->
+ assert_type(float, Src, Vst);
+valfun_1({fmove,{fr,_},Dst}, Vst) ->
+ set_type_reg({float,[]}, Dst, Vst);
+valfun_1({kill,{y,_}=Reg}, Vst) ->
+ set_type_y(initialized, Reg, Vst);
+valfun_1({test_heap,Heap,Live}, Vst) ->
+ test_heap(Heap, Live, Vst);
+valfun_1({bif,_Op,nofail,Src,Dst}, Vst) ->
+ validate_src(Src, Vst),
+ set_type_reg(term, Dst, Vst);
+%% Put instructions.
+valfun_1({put_list,A,B,Dst}, Vst0) ->
+ assert_term(A, Vst0),
+ assert_term(B, Vst0),
+ Vst = eat_heap(2, Vst0),
+ set_type_reg(cons, Dst, Vst);
+valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) ->
+ Vst = eat_heap(1, Vst0),
+ set_type_reg({tuple,Sz}, Dst, Vst);
+valfun_1({put,Src}, Vst) ->
+ assert_term(Src, Vst),
+ eat_heap(1, Vst);
+valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) ->
+ Vst = eat_heap(2*Sz, Vst0),
+ set_type_reg(cons, Dst, Vst);
+%% Allocate and deallocate, etc.
+valfun_1({allocate,Stk,Live}, Vst) ->
+ allocate(false, Stk, 0, Live, Vst);
+valfun_1({allocate_heap,Stk,Heap,Live}, Vst) ->
+ allocate(false, Stk, Heap, Live, Vst);
+valfun_1({allocate_zero,Stk,Live}, Vst) ->
+ allocate(true, Stk, 0, Live, Vst);
+valfun_1({allocate_heap_zero,Stk,Heap,Live}, Vst) ->
+ allocate(true, Stk, Heap, Live, Vst);
+valfun_1({init,{y,_}=Reg}, Vst) ->
+ set_type_y(initialized, Reg, Vst);
+valfun_1({deallocate,StkSize}, #vst{current=#st{numy=StkSize,ct=[]}}=Vst) ->
+ deallocate(Vst);
+valfun_1({deallocate,_}, #vst{current=#st{numy=NumY,ct=[]}}) ->
+ error({allocated,NumY});
+valfun_1({deallocate,_}, #vst{current=#st{ct=Fails}}) ->
+ error({catch_try_stack,Fails});
+%% Catch & try.
+valfun_1({'catch',Dst,{f,Fail}}, Vst0) when Fail /= none ->
+ Vst = #vst{current=#st{ct=Fails}=St} =
+ set_type_y({catchtag,Fail}, Dst, Vst0),
+ Vst#vst{current=St#st{ct=[Fail|Fails]}};
+valfun_1({'try',Dst,{f,Fail}}, Vst0) ->
+ Vst = #vst{current=#st{ct=Fails}=St} =
+ set_type_y({trytag,Fail}, Dst, Vst0),
+ Vst#vst{current=St#st{ct=[Fail|Fails]}};
+%% Do a postponed state branch if necessary and try next set of instructions
+valfun_1(I, #vst{current=#st{ct=[]}}=Vst) ->
+ valfun_2(I, Vst);
+valfun_1(I, #vst{current=#st{ct=Fails}}=Vst0) ->
+ %% Perform a postponed state branch
+ Vst = #vst{current=St} = lists:foldl(fun branch_state/2, Vst0, Fails),
+ valfun_2(I, Vst#vst{current=St#st{ct=[]}}).
+
+%% Instructions that can cause exceptions.
+valfun_2({apply,Live}, Vst) ->
+ call(Live+2, Vst);
+valfun_2({apply_last,Live,_}, Vst) ->
+ tail_call(Live+2, Vst);
+valfun_2({call_fun,Live}, Vst) ->
+ call(Live, Vst);
+valfun_2({call,Live,_}, Vst) ->
+ call(Live, Vst);
+valfun_2({call_ext,Live,Func}, Vst) ->
+ call(Func, Live, Vst);
+valfun_2({call_only,Live,_}, Vst) ->
+ tail_call(Live, Vst);
+valfun_2({call_ext_only,Live,_}, Vst) ->
+ tail_call(Live, Vst);
+valfun_2({call_last,Live,_,_}, Vst) ->
+ tail_call(Live, Vst);
+valfun_2({call_ext_last,Live,_,_}, Vst) ->
+ tail_call(Live, Vst);
+valfun_2({make_fun,_,_,Live}, Vst) ->
+ call(Live, Vst);
+valfun_2({make_fun2,_,_,_,Live}, Vst) ->
+ call(Live, Vst);
+%% Floating point.
+valfun_2({fconv,Src,{fr,_}}, Vst) ->
+ assert_term(Src, Vst);
+valfun_2({bif,fadd,_,[{fr,_},{fr,_}],{fr,_}}, Vst) ->
+ Vst;
+valfun_2({bif,fdiv,_,[{fr,_},{fr,_}],{fr,_}}, Vst) ->
+ Vst;
+valfun_2({bif,fmul,_,[{fr,_},{fr,_}],{fr,_}}, Vst) ->
+ Vst;
+valfun_2({bif,fnegate,_,[{fr,_}],{fr,_}}, Vst) ->
+ Vst;
+valfun_2({bif,fsub,_,[{fr,_},{fr,_}],{fr,_}}, Vst) ->
+ Vst;
+valfun_2(fclearerror, Vst) ->
+ Vst;
+valfun_2({fcheckerror,_}, Vst) ->
+ Vst;
+%% Other BIFs
+valfun_2({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) ->
+ TupleType0 = get_term_type(Tuple, Vst0),
+ PosType = get_term_type(Pos, Vst0),
+ Vst1 = branch_state(Fail, Vst0),
+ TupleType = upgrade_type({tuple,[get_tuple_size(PosType)]}, TupleType0),
+ Vst = set_type(TupleType, Tuple, Vst1),
+ set_type_reg(term, Dst, Vst);
+valfun_2({bif,Op,{f,Fail},Src,Dst}, Vst0) ->
+ validate_src(Src, Vst0),
+ Vst = branch_state(Fail, Vst0),
+ Type = bif_type(Op, Src, Vst),
+ set_type_reg(Type, Dst, Vst);
+valfun_2(return, #vst{current=#st{numy=none}}=Vst) ->
+ kill_state(Vst);
+valfun_2(return, #vst{current=#st{numy=NumY}}) ->
+ error({stack_frame,NumY});
+valfun_2({jump,{f,_}}, #vst{current=none}=Vst) ->
+ %% Must be an unreachable jump which was not optimized away.
+ %% Do nothing.
+ Vst;
+valfun_2({jump,{f,Lbl}}, Vst) ->
+ kill_state(branch_state(Lbl, Vst));
+valfun_2({loop_rec,{f,Fail},Dst}, Vst0) ->
+ Vst = branch_state(Fail, Vst0),
+ set_type_reg(term, Dst, Vst);
+valfun_2(remove_message, Vst) ->
+ Vst;
+valfun_2({wait,_}, Vst) ->
+ kill_state(Vst);
+valfun_2({wait_timeout,_,Src}, Vst) ->
+ assert_term(Src, Vst);
+valfun_2({loop_rec_end,_}, Vst) ->
+ kill_state(Vst);
+valfun_2(timeout, #vst{current=St}=Vst) ->
+ Vst#vst{current=St#st{x=init_regs(0, term)}};
+valfun_2(send, Vst) ->
+ call(2, Vst);
+%% Catch & try.
+valfun_2({catch_end,Reg}, Vst0) ->
+ case get_type(Reg, Vst0) of
+ {catchtag,_} ->
+ Vst = #vst{current=St} = set_type_reg(initialized, Reg, Vst0),
+ Xs = gb_trees_from_list([{0,term}]),
+ Vst#vst{current=St#st{x=Xs}};
+ Type ->
+ error({bad_type,Type})
+ end;
+valfun_2({try_end,Reg}, Vst) ->
+ case get_type(Reg, Vst) of
+ {trytag,_} ->
+ set_type_reg(initialized, Reg, Vst);
+ Type ->
+ error({bad_type,Type})
+ end;
+valfun_2({try_case,Reg}, Vst0) ->
+ case get_type(Reg, Vst0) of
+ {trytag,_} ->
+ Vst = #vst{current=St} = set_type_reg(initialized, Reg, Vst0),
+ Xs = gb_trees_from_list([{0,{atom,[]}},{1,term},{2,term}]),
+ Vst#vst{current=St#st{x=Xs}};
+ Type ->
+ error({bad_type,Type})
+ end;
+valfun_2({set_tuple_element,Src,Tuple,I}, Vst) ->
+ assert_term(Src, Vst),
+ assert_type({tuple_element,I+1}, Tuple, Vst);
+%% Match instructions.
+valfun_2({select_val,Src,{f,Fail},{list,Choices}}, Vst) ->
+ assert_term(Src, Vst),
+ Lbls = [L || {f,L} <- Choices]++[Fail],
+ kill_state(foldl(fun(L, S) -> branch_state(L, S) end, Vst, Lbls));
+valfun_2({select_tuple_arity,Tuple,{f,Fail},{list,Choices}}, Vst) ->
+ assert_type(tuple, Tuple, Vst),
+ kill_state(branch_arities(Choices, Tuple, branch_state(Fail, Vst)));
+valfun_2({get_list,Src,D1,D2}, Vst0) ->
+ assert_term(Src, Vst0),
+ Vst = set_type_reg(term, D1, Vst0),
+ set_type_reg(term, D2, Vst);
+valfun_2({get_tuple_element,Src,I,Dst}, Vst) ->
+ assert_type({tuple_element,I+1}, Src, Vst),
+ set_type_reg(term, Dst, Vst);
+valfun_2({bs_restore,_}, Vst) ->
+ Vst;
+valfun_2({bs_save,_}, Vst) ->
+ Vst;
+valfun_2({bs_start_match,{f,Fail},Src}, Vst) ->
+ assert_term(Src, Vst),
+ branch_state(Fail, Vst);
+valfun_2({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) ->
+ assert_term(Src, Vst),
+ branch_state(Fail, Vst);
+valfun_2({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) ->
+ Vst = branch_state(Fail, Vst0),
+ set_type_reg({integer,[]}, Dst, Vst);
+valfun_2({test,bs_test_tail,{f,Fail},_}, Vst) ->
+ branch_state(Fail, Vst);
+%% Other test instructions.
+valfun_2({test,is_float,{f,Lbl},[Float]}, Vst0) ->
+ assert_term(Float, Vst0),
+ Vst = branch_state(Lbl, Vst0),
+ set_type({float,[]}, Float, Vst);
+valfun_2({test,is_tuple,{f,Lbl},[Tuple]}, Vst0) ->
+ assert_term(Tuple, Vst0),
+ Vst = branch_state(Lbl, Vst0),
+ set_type({tuple,[0]}, Tuple, Vst);
+valfun_2({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst0) when is_integer(Sz) ->
+ assert_type(tuple, Tuple, Vst0),
+ Vst = branch_state(Lbl, Vst0),
+ set_type_reg({tuple,Sz}, Tuple, Vst);
+valfun_2({test,_Op,{f,Lbl},Src}, Vst) ->
+ validate_src(Src, Vst),
+ branch_state(Lbl, Vst);
+valfun_2({bs_add,{f,Fail},[A,B,_],Dst}, Vst0) ->
+ assert_term(A, Vst0),
+ assert_term(B, Vst0),
+ Vst = branch_state(Fail, Vst0),
+ set_type_reg({integer,[]}, Dst, Vst);
+valfun_2({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst0) ->
+ assert_term(Src, Vst0),
+ Vst = branch_state(Fail, Vst0),
+ set_type_reg({integer,[]}, Dst, Vst);
+valfun_2({bs_init2,{f,Fail},_,Heap,_,_,Dst}, Vst0) ->
+ Vst1 = heap_alloc(Heap, Vst0),
+ Vst = branch_state(Fail, Vst1),
+ set_type_reg(binary, Dst, Vst);
+valfun_2({bs_put_string,Sz,_}, Vst) when is_integer(Sz) ->
+ Vst;
+valfun_2({bs_put_binary,{f,Fail},_,_,_,Src}, Vst0) ->
+ assert_term(Src, Vst0),
+ branch_state(Fail, Vst0);
+valfun_2({bs_put_float,{f,Fail},_,_,_,Src}, Vst0) ->
+ assert_term(Src, Vst0),
+ branch_state(Fail, Vst0);
+valfun_2({bs_put_integer,{f,Fail},_,_,_,Src}, Vst0) ->
+ assert_term(Src, Vst0),
+ branch_state(Fail, Vst0);
+%% Old bit syntax construction (before R10B).
+valfun_2({bs_init,_,_}, Vst) -> Vst;
+valfun_2({bs_need_buf,_}, Vst) -> Vst;
+valfun_2({bs_final,{f,Fail},Dst}, Vst0) ->
+ Vst = branch_state(Fail, Vst0),
+ set_type_reg(binary, Dst, Vst);
+%% Misc.
+valfun_2({'%live',Live}, Vst) ->
+ verify_live(Live, Vst),
+ Vst;
+valfun_2(_, _) ->
+ error(unknown_instruction).
+
+kill_state(#vst{current=#st{ct=[]}}=Vst) ->
+ Vst#vst{current=none};
+kill_state(#vst{current=#st{ct=Fails}}=Vst0) ->
+ Vst = lists:foldl(fun branch_state/2, Vst0, Fails),
+ Vst#vst{current=none}.
+
+%% A "plain" call.
+%% The stackframe must have a known size and be initialized.
+%% The instruction will return to the instruction following the call.
+call(Live, #vst{current=St}=Vst) ->
+ verify_live(Live, Vst),
+ verify_y_init(Vst),
+ Xs = gb_trees_from_list([{0,term}]),
+ Vst#vst{current=St#st{x=Xs}}.
+
+%% A "plain" call.
+%% The stackframe must have a known size and be initialized.
+%% The instruction will return to the instruction following the call.
+call(Name, Live, #vst{current=St}=Vst) ->
+ verify_live(Live, Vst),
+ case return_type(Name, Vst) of
+ exception ->
+ kill_state(Vst);
+ Type ->
+ verify_y_init(Vst),
+ Xs = gb_trees_from_list([{0,Type}]),
+ Vst#vst{current=St#st{x=Xs}}
+ end.
+
+%% Tail call.
+%% The stackframe must have a known size and be initialized.
+%% Does not return to the instruction following the call.
+tail_call(Live, Vst) ->
+ kill_state(call(Live, Vst)).
+
+allocate(Zero, Stk, Heap, Live, #vst{current=#st{numy=none}=St}=Vst) ->
+ verify_live(Live, Vst),
+ Ys = init_regs(case Zero of
+ true -> Stk;
+ false -> 0
+ end, initialized),
+ Vst#vst{current=St#st{y=Ys,numy=Stk,h=heap_alloc_1(Heap)}};
+allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) ->
+ error({existing_stack_frame,{size,Numy}}).
+
+deallocate(#vst{current=St}=Vst) ->
+ Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}.
+
+test_heap(Heap, Live, Vst) ->
+ verify_live(Live, Vst),
+ heap_alloc(Heap, Vst).
+
+heap_alloc(Heap, #vst{current=St}=Vst) ->
+ Vst#vst{current=St#st{h=heap_alloc_1(Heap)}}.
+
+heap_alloc_1({alloc,Alloc}) ->
+ {value,{_,Heap}} = lists:keysearch(words, 1, Alloc),
+ Heap;
+heap_alloc_1(Heap) when is_integer(Heap) -> Heap.
+
+
+set_type(Type, {x,_}=Reg, Vst) -> set_type_reg(Type, Reg, Vst);
+set_type(Type, {y,_}=Reg, Vst) -> set_type_y(Type, Reg, Vst);
+set_type(_, _, #vst{}=Vst) -> Vst.
+
+set_type_reg(Type, {x,X}, #vst{current=#st{x=Xs}=St}=Vst)
+ when 0 =< X, X < ?MAXREG ->
+ Vst#vst{current=St#st{x=gb_trees:enter(X, Type, Xs)}};
+set_type_reg(Type, Reg, Vst) ->
+ set_type_y(Type, Reg, Vst).
+
+set_type_y(Type, {y,Y}=Reg, #vst{current=#st{y=Ys,numy=NumY}=St}=Vst)
+ when is_integer(Y), 0 =< Y, Y < ?MAXREG ->
+ case {Y,NumY} of
+ {_,none} ->
+ error({no_stack_frame,Reg});
+ {_,_} when Y > NumY ->
+ error({y_reg_out_of_range,Reg,NumY});
+ {_,_} ->
+ Vst#vst{current=St#st{y=gb_trees:enter(Y, Type, Ys)}}
+ end;
+set_type_y(Type, Reg, #vst{}) -> error({invalid_store,Reg,Type}).
+
+assert_term(Src, Vst) ->
+ get_term_type(Src, Vst),
+ Vst.
+
+%% The possible types.
+%%
+%% First non-term types:
+%%
+%% initialized Only for Y registers. Means that the Y register
+%% has been initialized with some valid term so that
+%% it is safe to pass to the garbage collector.
+%% NOT safe to use in any other way (will not crash the
+%% emulator, but clearly points to a bug in the compiler).
+%%
+%% {catchtag,Lbl} A special term used within a catch. Must only be used
+%% by the catch instructions; NOT safe to use in other
+%% instructions.
+%%
+%% {trytag,Lbl} A special term used within a try block. Must only be
+%% used by the catch instructions; NOT safe to use in other
+%% instructions.
+%%
+%% exception Can only be used as a type returned by return_type/2
+%% (which gives the type of the value returned by a BIF).
+%% Thus 'exception' is never stored as type descriptor
+%% for a register.
+%%
+%% Normal terms:
+%%
+%% term          Any valid Erlang term (but none of the special types above).
+%%
+%% bool The atom 'true' or the atom 'false'.
+%%
+%% cons Cons cell: [_|_]
+%%
+%% nil Empty list: []
+%%
+%% {tuple,[Sz]} Tuple. An element has been accessed using
+%% element/2 or setelement/3 so that it is known that
+%% the type is a tuple of size at least Sz.
+%%
+%% {tuple,Sz} Tuple. A test_arity instruction has been seen
+%% so that it is known that the size is exactly Sz.
+%%
+%% {atom,[]} Atom.
+%% {atom,Atom}
+%%
+%% {integer,[]} Integer.
+%% {integer,Integer}
+%%
+%% {float,[]} Float.
+%% {float,Float}
+%%
+%% number Integer or Float of unknown value
+%%
+
+assert_type(WantedType, Term, Vst) ->
+ assert_type(WantedType, get_type(Term, Vst)),
+ Vst.
+
+assert_type(float, {float,_}) -> ok;
+assert_type(tuple, {tuple,_}) -> ok;
+assert_type({tuple_element,I}, {tuple,[Sz]})
+ when 1 =< I, I =< Sz ->
+ ok;
+assert_type({tuple_element,I}, {tuple,Sz})
+ when is_integer(Sz), 1 =< I, I =< Sz ->
+ ok;
+assert_type(Needed, Actual) ->
+ error({bad_type,{needed,Needed},{actual,Actual}}).
+
+%% upgrade_type/2 is used when linear code finds out more and
+%% more information about a type, so the type gets "narrower"
+%% or perhaps inconsistent. In the case of inconsistency
+%% we mostly widen the type to 'term' to make subsequent
+%% code fail if it assumes anything about the type.
+
+upgrade_type(Same, Same) -> Same;
+upgrade_type(term, OldT) -> OldT;
+upgrade_type(NewT, term) -> NewT;
+upgrade_type({Type,New}=NewT, {Type,Old}=OldT)
+ when Type == atom; Type == integer; Type == float ->
+ if New =:= Old -> OldT;
+ New =:= [] -> OldT;
+ Old =:= [] -> NewT;
+ true -> term
+ end;
+upgrade_type({Type,_}=NewT, number)
+ when Type == integer; Type == float ->
+ NewT;
+upgrade_type(number, {Type,_}=OldT)
+ when Type == integer; Type == float ->
+ OldT;
+upgrade_type(bool, {atom,A}) ->
+ upgrade_bool(A);
+upgrade_type({atom,A}, bool) ->
+ upgrade_bool(A);
+upgrade_type({tuple,[Sz]}, {tuple,[OldSz]})
+ when is_integer(Sz) ->
+ {tuple,[max(Sz, OldSz)]};
+upgrade_type({tuple,Sz}=T, {tuple,[_]})
+ when is_integer(Sz) ->
+ %% This also takes care of the user error when a tuple element
+    %% is accessed outside the known exact tuple size; there is
+ %% no more type information, just a runtime error which is not
+ %% our problem.
+ T;
+upgrade_type({tuple,[Sz]}, {tuple,_}=T)
+ when is_integer(Sz) ->
+ %% Same as the previous clause but mirrored.
+ T;
+upgrade_type(_A, _B) ->
+ %%io:format("upgrade_type: ~p ~p\n", [_A,_B]),
+ term.
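+
+%% A few concrete cases of the narrowing rules above, as an illustrative
+%% sketch (a hypothetical helper, not present in the original source):
+upgrade_type_example() ->
+    {tuple,[5]} = upgrade_type({tuple,[5]}, {tuple,[3]}),
+    {integer,7} = upgrade_type({integer,7}, {integer,[]}),
+    term = upgrade_type({integer,7}, {integer,8}),
+    {atom,true} = upgrade_type(bool, {atom,true}),
+    ok.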
+
+upgrade_bool([]) -> bool;
+upgrade_bool(true) -> {atom,true};
+upgrade_bool(false) -> {atom,false};
+upgrade_bool(_) -> term.
+
+get_tuple_size({integer,[]}) -> 0;
+get_tuple_size({integer,Sz}) -> Sz;
+get_tuple_size(_) -> 0.
+
+validate_src(Ss, Vst) when is_list(Ss) ->
+ foldl(fun(S, _) -> get_type(S, Vst) end, ok, Ss).
+
+get_term_type(Src, Vst) ->
+ case get_type(Src, Vst) of
+ initialized -> error({not_assigned,Src});
+ exception -> error({exception,Src});
+ {catchtag,_} -> error({catchtag,Src});
+ {trytag,_} -> error({trytag,Src});
+ Type -> Type
+ end.
+
+get_type(nil=T, _) -> T;
+get_type({atom,A}=T, _) when is_atom(A) -> T;
+get_type({float,F}=T, _) when is_float(F) -> T;
+get_type({integer,I}=T, _) when is_integer(I) -> T;
+get_type({x,X}=Reg, #vst{current=#st{x=Xs}}) when is_integer(X) ->
+ case gb_trees:lookup(X, Xs) of
+ {value,Type} -> Type;
+ none -> error({uninitialized_reg,Reg})
+ end;
+get_type({y,Y}=Reg, #vst{current=#st{y=Ys}}) when is_integer(Y) ->
+ case gb_trees:lookup(Y, Ys) of
+ {value,initialized} -> error({unassigned_reg,Reg});
+ {value,Type} -> Type;
+ none -> error({uninitialized_reg,Reg})
+ end;
+get_type(Src, _) -> error({bad_source,Src}).
+
+branch_arities([], _, #vst{}=Vst) -> Vst;
+branch_arities([Sz,{f,L}|T], Tuple, #vst{current=St}=Vst0)
+ when is_integer(Sz) ->
+ Vst1 = set_type_reg({tuple,Sz}, Tuple, Vst0),
+ Vst = branch_state(L, Vst1),
+ branch_arities(T, Tuple, Vst#vst{current=St}).
+
+branch_state(0, #vst{}=Vst) -> Vst;
+branch_state(L, #vst{current=St,branched=B}=Vst) ->
+ Vst#vst{
+ branched=case gb_trees:is_defined(L, B) of
+ false ->
+ gb_trees:insert(L, St#st{ct=[]}, B);
+ true ->
+ MergedSt = merge_states(L, St, B),
+ gb_trees:update(L, MergedSt#st{ct=[]}, B)
+ end}.
+
+%% merge_states/3 is used when there is more than one way to arrive
+%% at this point, and the type states for the different paths have
+%% to be merged. The type states are downgraded to the least common
+%% subset for the subsequent code.
+
+merge_states(0, St, _Branched) -> St;
+merge_states(L, St, Branched) ->
+ case gb_trees:lookup(L, Branched) of
+ none -> St;
+ {value,OtherSt} when St == none -> OtherSt;
+ {value,OtherSt} ->
+ merge_states_1(St, OtherSt)
+ end.
+
+merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0}=St,
+ #st{x=Xs1,y=Ys1,numy=NumY1,h=H1}) ->
+ NumY = merge_stk(NumY0, NumY1),
+ Xs = merge_regs(Xs0, Xs1),
+ Ys = merge_regs(Ys0, Ys1),
+ St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1)}.
+
+merge_stk(S, S) -> S;
+merge_stk(_, _) -> undecided.
+
+merge_regs(Rs0, Rs1) ->
+ Rs = merge_regs_1(gb_trees:to_list(Rs0), gb_trees:to_list(Rs1)),
+ gb_trees_from_list(Rs).
+
+merge_regs_1([Same|Rs1], [Same|Rs2]) ->
+ [Same|merge_regs_1(Rs1, Rs2)];
+merge_regs_1([{R1,_}|Rs1], [{R2,_}|_]=Rs2) when R1 < R2 ->
+ merge_regs_1(Rs1, Rs2);
+merge_regs_1([{R1,_}|_]=Rs1, [{R2,_}|Rs2]) when R1 > R2 ->
+ merge_regs_1(Rs1, Rs2);
+merge_regs_1([{R,Type1}|Rs1], [{R,Type2}|Rs2]) ->
+ [{R,merge_types(Type1, Type2)}|merge_regs_1(Rs1, Rs2)];
+merge_regs_1([], []) -> [];
+merge_regs_1([], [_|_]) -> [];
+merge_regs_1([_|_], []) -> [].
+
+merge_types(T, T) -> T;
+merge_types(initialized=I, _) -> I;
+merge_types(_, initialized=I) -> I;
+merge_types({tuple,Same}=T, {tuple,Same}) -> T;
+merge_types({tuple,A}, {tuple,B}) ->
+ {tuple,[min(tuple_sz(A), tuple_sz(B))]};
+merge_types({Type,A}, {Type,B})
+ when Type == atom; Type == integer; Type == float ->
+ if A =:= B -> {Type,A};
+ true -> {Type,[]}
+ end;
+merge_types({Type,_}, number)
+ when Type == integer; Type == float ->
+ number;
+merge_types(number, {Type,_})
+ when Type == integer; Type == float ->
+ number;
+merge_types(bool, {atom,A}) ->
+ merge_bool(A);
+merge_types({atom,A}, bool) ->
+ merge_bool(A);
+merge_types(_, _) -> term.
+
+tuple_sz([Sz]) -> Sz;
+tuple_sz(Sz) -> Sz.
+
+merge_bool([]) -> {atom,[]};
+merge_bool(true) -> bool;
+merge_bool(false) -> bool;
+merge_bool(_) -> {atom,[]}.
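+
+%% An illustrative sketch (again a hypothetical helper, not in the
+%% original source): merging downgrades two type descriptions to their
+%% least common description, as explained for merge_states/3 above.
+merge_types_example() ->
+    {tuple,[2]} = merge_types({tuple,2}, {tuple,[3]}),
+    {integer,[]} = merge_types({integer,7}, {integer,9}),
+    number = merge_types({float,[]}, number),
+    bool = merge_types(bool, {atom,true}),
+    term = merge_types({atom,foo}, cons),
+    ok.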
+
+verify_y_init(#vst{current=#st{numy=none}}) -> ok;
+verify_y_init(#vst{current=#st{numy=undecided}}) ->
+ error(unknown_size_of_stackframe);
+verify_y_init(#vst{current=#st{y=Ys,numy=NumY}}) ->
+ verify_y_init_1(NumY, Ys).
+
+verify_y_init_1(0, _) -> ok;
+verify_y_init_1(N, Ys) ->
+ Y = N-1,
+ case gb_trees:is_defined(Y, Ys) of
+ false -> error({{y,Y},not_initialized});
+ true -> verify_y_init_1(Y, Ys)
+ end.
+
+verify_live(0, #vst{}) -> ok;
+verify_live(N, #vst{current=#st{x=Xs}}) ->
+ verify_live_1(N, Xs).
+
+verify_live_1(0, _) -> ok;
+verify_live_1(N, Xs) ->
+ X = N-1,
+ case gb_trees:is_defined(X, Xs) of
+ false -> error({{x,X},not_live});
+ true -> verify_live_1(X, Xs)
+ end.
+
+eat_heap(N, #vst{current=#st{h=Heap0}=St}=Vst) ->
+ case Heap0-N of
+ Neg when Neg < 0 ->
+ error({heap_overflow,{left,Heap0},{wanted,N}});
+ Heap ->
+ Vst#vst{current=St#st{h=Heap}}
+ end.
+
+bif_type('-', Src, Vst) ->
+ arith_type(Src, Vst);
+bif_type('+', Src, Vst) ->
+ arith_type(Src, Vst);
+bif_type('*', Src, Vst) ->
+ arith_type(Src, Vst);
+bif_type(abs, [Num], Vst) ->
+ case get_type(Num, Vst) of
+ {float,_}=T -> T;
+ {integer,_}=T -> T;
+ _ -> number
+ end;
+bif_type(float, _, _) -> {float,[]};
+bif_type('/', _, _) -> {float,[]};
+%% Integer operations.
+bif_type('div', [_,_], _) -> {integer,[]};
+bif_type('rem', [_,_], _) -> {integer,[]};
+bif_type(length, [_], _) -> {integer,[]};
+bif_type(size, [_], _) -> {integer,[]};
+bif_type(trunc, [_], _) -> {integer,[]};
+bif_type(round, [_], _) -> {integer,[]};
+bif_type('band', [_,_], _) -> {integer,[]};
+bif_type('bor', [_,_], _) -> {integer,[]};
+bif_type('bxor', [_,_], _) -> {integer,[]};
+bif_type('bnot', [_], _) -> {integer,[]};
+bif_type('bsl', [_,_], _) -> {integer,[]};
+bif_type('bsr', [_,_], _) -> {integer,[]};
+%% Booleans.
+bif_type('==', [_,_], _) -> bool;
+bif_type('/=', [_,_], _) -> bool;
+bif_type('=<', [_,_], _) -> bool;
+bif_type('<', [_,_], _) -> bool;
+bif_type('>=', [_,_], _) -> bool;
+bif_type('>', [_,_], _) -> bool;
+bif_type('=:=', [_,_], _) -> bool;
+bif_type('=/=', [_,_], _) -> bool;
+bif_type('not', [_], _) -> bool;
+bif_type('and', [_,_], _) -> bool;
+bif_type('or', [_,_], _) -> bool;
+bif_type('xor', [_,_], _) -> bool;
+bif_type(is_atom, [_], _) -> bool;
+bif_type(is_boolean, [_], _) -> bool;
+bif_type(is_binary, [_], _) -> bool;
+bif_type(is_constant, [_], _) -> bool;
+bif_type(is_float, [_], _) -> bool;
+bif_type(is_function, [_], _) -> bool;
+bif_type(is_integer, [_], _) -> bool;
+bif_type(is_list, [_], _) -> bool;
+bif_type(is_number, [_], _) -> bool;
+bif_type(is_pid, [_], _) -> bool;
+bif_type(is_port, [_], _) -> bool;
+bif_type(is_reference, [_], _) -> bool;
+bif_type(is_tuple, [_], _) -> bool;
+%% Misc.
+bif_type(node, [], _) -> {atom,[]};
+bif_type(node, [_], _) -> {atom,[]};
+bif_type(hd, [_], _) -> term;
+bif_type(tl, [_], _) -> term;
+bif_type(get, [_], _) -> term;
+bif_type(raise, [_,_], _) -> exception;
+bif_type(_, _, _) -> term.
+
+arith_type([A,B], Vst) ->
+ case {get_type(A, Vst),get_type(B, Vst)} of
+ {{float,_},_} -> {float,[]};
+ {_,{float,_}} -> {float,[]};
+ {_,_} -> number
+ end;
+arith_type(_, _) -> number.
+
+return_type({extfunc,M,F,A}, Vst) ->
+ return_type_1(M, F, A, Vst).
+
+return_type_1(erlang, setelement, 3, Vst) ->
+ Tuple = {x,1},
+ TupleType =
+ case get_type(Tuple, Vst) of
+ {tuple,_}=TT -> TT;
+ _ -> {tuple,[0]}
+ end,
+ case get_type({x,0}, Vst) of
+ {integer,[]} -> TupleType;
+ {integer,I} -> upgrade_type({tuple,[I]}, TupleType);
+ _ -> TupleType
+ end;
+return_type_1(erlang, F, A, _) ->
+ return_type_erl(F, A);
+return_type_1(math, F, A, _) ->
+ return_type_math(F, A);
+return_type_1(_, _, _, _) -> term.
+
+return_type_erl(exit, 1) -> exception;
+return_type_erl(throw, 1) -> exception;
+return_type_erl(fault, 1) -> exception;
+return_type_erl(fault, 2) -> exception;
+return_type_erl(error, 1) -> exception;
+return_type_erl(error, 2) -> exception;
+return_type_erl(_, _) -> term.
+
+return_type_math(cos, 1) -> {float,[]};
+return_type_math(cosh, 1) -> {float,[]};
+return_type_math(sin, 1) -> {float,[]};
+return_type_math(sinh, 1) -> {float,[]};
+return_type_math(tan, 1) -> {float,[]};
+return_type_math(tanh, 1) -> {float,[]};
+return_type_math(acos, 1) -> {float,[]};
+return_type_math(acosh, 1) -> {float,[]};
+return_type_math(asin, 1) -> {float,[]};
+return_type_math(asinh, 1) -> {float,[]};
+return_type_math(atan, 1) -> {float,[]};
+return_type_math(atanh, 1) -> {float,[]};
+return_type_math(erf, 1) -> {float,[]};
+return_type_math(erfc, 1) -> {float,[]};
+return_type_math(exp, 1) -> {float,[]};
+return_type_math(log, 1) -> {float,[]};
+return_type_math(log10, 1) -> {float,[]};
+return_type_math(sqrt, 1) -> {float,[]};
+return_type_math(atan2, 2) -> {float,[]};
+return_type_math(pow, 2) -> {float,[]};
+return_type_math(pi, 0) -> {float,[]};
+return_type_math(_, _) -> term.
+
+min(A, B) when is_integer(A), is_integer(B), A < B -> A;
+min(A, B) when is_integer(A), is_integer(B) -> B.
+
+max(A, B) when is_integer(A), is_integer(B), A > B -> A;
+max(A, B) when is_integer(A), is_integer(B) -> B.
+
+gb_trees_from_list(L) -> gb_trees:from_orddict(orddict:from_list(L)).
+
+-ifdef(DEBUG).
+error(Error) -> exit(Error).
+-else.
+error(Error) -> throw(Error).
+-endif.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl.erl
new file mode 100644
index 0000000000..be9e088276
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl.erl
@@ -0,0 +1,4169 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Richard Carlsson.
+%% Copyright (C) 1999-2002 Richard Carlsson.
+%% Portions created by Ericsson are Copyright 2001, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: cerl.erl,v 1.3 2010/03/04 13:54:20 maria Exp $
+
+%% =====================================================================
+%% @doc Core Erlang abstract syntax trees.
+%%
+%% <p> This module defines an abstract data type for representing Core
+%% Erlang source code as syntax trees.</p>
+%%
+%% <p>A recommended starting point for the first-time user is the
+%% documentation of the function <a
+%% href="#type-1"><code>type/1</code></a>.</p>
+%%
+%% <h3><b>NOTES:</b></h3>
+%%
+%% <p>This module deals with the composition and decomposition of
+%% <em>syntactic</em> entities (as opposed to semantic ones); its
+%% purpose is to hide all direct references to the data structures
+%% used to represent these entities. With few exceptions, the
+%% functions in this module perform no semantic interpretation of
+%% their inputs, and in general, the user is assumed to pass
+%% type-correct arguments - if this is not done, the effects are not
+%% defined.</p>
+%%
+%% <p>The internal representations of abstract syntax trees are
+%% subject to change without notice, and should not be documented
+%% outside this module. Furthermore, we do not give any guarantees on
+%% how an abstract syntax tree may or may not be represented, <em>with
+%% the following exceptions</em>: no syntax tree is represented by a
+%% single atom, such as <code>none</code>, by a list constructor
+%% <code>[X | Y]</code>, or by the empty list <code>[]</code>. This
+%% can be relied on when writing functions that operate on syntax
+%% trees.</p>
+%%
+%% @type cerl(). An abstract Core Erlang syntax tree.
+%%
+%% <p>Every abstract syntax tree has a <em>type</em>, given by the
+%% function <a href="#type-1"><code>type/1</code></a>. In addition,
+%% each syntax tree has a list of <em>user annotations</em> (cf. <a
+%% href="#get_ann-1"><code>get_ann/1</code></a>), which are included
+%% in the Core Erlang syntax.</p>
+
+-module(cerl).
+
+-export([abstract/1, add_ann/2, alias_pat/1, alias_var/1,
+ ann_abstract/2, ann_c_alias/3, ann_c_apply/3, ann_c_atom/2,
+ ann_c_call/4, ann_c_case/3, ann_c_catch/2, ann_c_char/2,
+ ann_c_clause/3, ann_c_clause/4, ann_c_cons/3, ann_c_float/2,
+ ann_c_fname/3, ann_c_fun/3, ann_c_int/2, ann_c_let/4,
+ ann_c_letrec/3, ann_c_module/4, ann_c_module/5, ann_c_nil/1,
+ ann_c_cons_skel/3, ann_c_tuple_skel/2, ann_c_primop/3,
+ ann_c_receive/2, ann_c_receive/4, ann_c_seq/3, ann_c_string/2,
+ ann_c_try/6, ann_c_tuple/2, ann_c_values/2, ann_c_var/2,
+ ann_make_data/3, ann_make_list/2, ann_make_list/3,
+ ann_make_data_skel/3, ann_make_tree/3, apply_args/1,
+ apply_arity/1, apply_op/1, atom_lit/1, atom_name/1, atom_val/1,
+ c_alias/2, c_apply/2, c_atom/1, c_call/3, c_case/2, c_catch/1,
+ c_char/1, c_clause/2, c_clause/3, c_cons/2, c_float/1,
+ c_fname/2, c_fun/2, c_int/1, c_let/3, c_letrec/2, c_module/3,
+ c_module/4, c_nil/0, c_cons_skel/2, c_tuple_skel/1, c_primop/2,
+ c_receive/1, c_receive/3, c_seq/2, c_string/1, c_try/5,
+ c_tuple/1, c_values/1, c_var/1, call_args/1, call_arity/1,
+ call_module/1, call_name/1, case_arg/1, case_arity/1,
+ case_clauses/1, catch_body/1, char_lit/1, char_val/1,
+ clause_arity/1, clause_body/1, clause_guard/1, clause_pats/1,
+ clause_vars/1, concrete/1, cons_hd/1, cons_tl/1, copy_ann/2,
+ data_arity/1, data_es/1, data_type/1, float_lit/1, float_val/1,
+ fname_arity/1, fname_id/1, fold_literal/1, from_records/1,
+ fun_arity/1, fun_body/1, fun_vars/1, get_ann/1, int_lit/1,
+ int_val/1, is_c_alias/1, is_c_apply/1, is_c_atom/1,
+ is_c_call/1, is_c_case/1, is_c_catch/1, is_c_char/1,
+ is_c_clause/1, is_c_cons/1, is_c_float/1, is_c_fname/1,
+ is_c_fun/1, is_c_int/1, is_c_let/1, is_c_letrec/1, is_c_list/1,
+ is_c_module/1, is_c_nil/1, is_c_primop/1, is_c_receive/1,
+ is_c_seq/1, is_c_string/1, is_c_try/1, is_c_tuple/1,
+ is_c_values/1, is_c_var/1, is_data/1, is_leaf/1, is_literal/1,
+ is_literal_term/1, is_print_char/1, is_print_string/1,
+ let_arg/1, let_arity/1, let_body/1, let_vars/1, letrec_body/1,
+ letrec_defs/1, letrec_vars/1, list_elements/1, list_length/1,
+ make_data/2, make_list/1, make_list/2, make_data_skel/2,
+ make_tree/2, meta/1, module_attrs/1, module_defs/1,
+ module_exports/1, module_name/1, module_vars/1,
+ pat_list_vars/1, pat_vars/1, primop_args/1, primop_arity/1,
+ primop_name/1, receive_action/1, receive_clauses/1,
+ receive_timeout/1, seq_arg/1, seq_body/1, set_ann/2,
+ string_lit/1, string_val/1, subtrees/1, to_records/1,
+ try_arg/1, try_body/1, try_vars/1, try_evars/1, try_handler/1,
+ tuple_arity/1, tuple_es/1, type/1, unfold_literal/1,
+ update_c_alias/3, update_c_apply/3, update_c_call/4,
+ update_c_case/3, update_c_catch/2, update_c_clause/4,
+ update_c_cons/3, update_c_cons_skel/3, update_c_fname/2,
+ update_c_fname/3, update_c_fun/3, update_c_let/4,
+ update_c_letrec/3, update_c_module/5, update_c_primop/3,
+ update_c_receive/4, update_c_seq/3, update_c_try/6,
+ update_c_tuple/2, update_c_tuple_skel/2, update_c_values/2,
+ update_c_var/2, update_data/3, update_list/2, update_list/3,
+ update_data_skel/3, update_tree/2, update_tree/3,
+ values_arity/1, values_es/1, var_name/1, c_binary/1,
+ update_c_binary/2, ann_c_binary/2, is_c_binary/1,
+ binary_segments/1, c_bitstr/3, c_bitstr/4, c_bitstr/5,
+ update_c_bitstr/5, update_c_bitstr/6, ann_c_bitstr/5,
+ ann_c_bitstr/6, is_c_bitstr/1, bitstr_val/1, bitstr_size/1,
+ bitstr_bitsize/1, bitstr_unit/1, bitstr_type/1,
+ bitstr_flags/1]).
+
+-include("core_parse.hrl").
+
+
+%% =====================================================================
+%% Representation (general)
+%%
+%% All nodes are represented by tuples of arity 2 or (generally)
+%% greater, whose first element is an atom which uniquely identifies the
+%% type of the node, and whose second element is a (proper) list of
+%% annotation terms associated with the node - this is by default empty.
+%%
+%% For most node constructor functions, there are analogous functions
+%% named 'ann_...', taking one extra argument 'As' (always the first
+%% argument), specifying an annotation list at node creation time.
+%% Similarly, there are also functions named 'update_...', taking one
+%% extra argument 'Old', specifying a node from which all fields not
+%% explicitly given as arguments should be copied (generally, this is
+%% the annotation field only).
+%% =====================================================================
+
+%% This defines the general representation of constant literals:
+
+-record(literal, {ann = [], val}).
+
+
+%% @spec type(Node::cerl()) -> atom()
+%%
+%% @doc Returns the type tag of <code>Node</code>. Current node types
+%% are:
+%%
+%% <p><center><table border="1">
+%% <tr>
+%% <td>alias</td>
+%% <td>apply</td>
+%% <td>binary</td>
+%% <td>bitstr</td>
+%% <td>call</td>
+%% <td>case</td>
+%% <td>catch</td>
+%% </tr><tr>
+%% <td>clause</td>
+%% <td>cons</td>
+%% <td>fun</td>
+%% <td>let</td>
+%% <td>letrec</td>
+%% <td>literal</td>
+%% <td>module</td>
+%% </tr><tr>
+%% <td>primop</td>
+%% <td>receive</td>
+%% <td>seq</td>
+%% <td>try</td>
+%% <td>tuple</td>
+%% <td>values</td>
+%% <td>var</td>
+%% </tr>
+%% </table></center></p>
+%%
+%% <p>Note: The name of the primary constructor function for a node
+%% type is always the name of the type itself, prefixed by
+%% "<code>c_</code>"; recognizer predicates are correspondingly
+%% prefixed by "<code>is_c_</code>". Furthermore, to simplify
+%% preservation of annotations (cf. <code>get_ann/1</code>), there are
+%% analogous constructor functions prefixed by "<code>ann_c_</code>"
+%% and "<code>update_c_</code>", for setting the annotation list of
+%% the new node to either a specific value or to the annotations of an
+%% existing node, respectively.</p>
+%%
+%% @see abstract/1
+%% @see c_alias/2
+%% @see c_apply/2
+%% @see c_binary/1
+%% @see c_bitstr/5
+%% @see c_call/3
+%% @see c_case/2
+%% @see c_catch/1
+%% @see c_clause/3
+%% @see c_cons/2
+%% @see c_fun/2
+%% @see c_let/3
+%% @see c_letrec/2
+%% @see c_module/3
+%% @see c_primop/2
+%% @see c_receive/1
+%% @see c_seq/2
+%% @see c_try/5
+%% @see c_tuple/1
+%% @see c_values/1
+%% @see c_var/1
+%% @see get_ann/1
+%% @see to_records/1
+%% @see from_records/1
+%% @see data_type/1
+%% @see subtrees/1
+%% @see meta/1
+
+type(Node) ->
+ element(1, Node).
+
+
+%% @spec is_leaf(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is a leaf node,
+%% otherwise <code>false</code>. The current leaf node types are
+%% <code>literal</code> and <code>var</code>.
+%%
+%% <p>Note: all literals (cf. <code>is_literal/1</code>) are leaf
+%% nodes, even if they represent structured (constant) values such as
+%% <code>{foo, [bar, baz]}</code>. Also note that variables are leaf
+%% nodes but not literals.</p>
+%%
+%% @see type/1
+%% @see is_literal/1
+
+is_leaf(Node) ->
+ case type(Node) of
+ literal -> true;
+ var -> true;
+ _ -> false
+ end.
+
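+%% A minimal usage sketch for type/1 and is_leaf/1 (assuming this module
+%% is compiled under the name `cerl' and called from the Erlang shell):
+%%
+%%   literal = cerl:type(cerl:c_int(1)),
+%%   true = cerl:is_leaf(cerl:c_int(1)),
+%%   V = cerl:c_var('X'),
+%%   var = cerl:type(V),
+%%   true = cerl:is_leaf(V).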
+
+%% @spec get_ann(cerl()) -> [term()]
+%%
+%% @doc Returns the list of user annotations associated with a syntax
+%% tree node. For a newly created node, this is the empty list. The
+%% annotations may be any terms.
+%%
+%% @see set_ann/2
+
+get_ann(Node) ->
+ element(2, Node).
+
+
+%% @spec set_ann(Node::cerl(), Annotations::[term()]) -> cerl()
+%%
+%% @doc Sets the list of user annotations of <code>Node</code> to
+%% <code>Annotations</code>.
+%%
+%% @see get_ann/1
+%% @see add_ann/2
+%% @see copy_ann/2
+
+set_ann(Node, List) ->
+ setelement(2, Node, List).
+
+
+%% @spec add_ann(Annotations::[term()], Node::cerl()) -> cerl()
+%%
+%% @doc Appends <code>Annotations</code> to the list of user
+%% annotations of <code>Node</code>.
+%%
+%% <p>Note: this is equivalent to <code>set_ann(Node, Annotations ++
+%% get_ann(Node))</code>, but potentially more efficient.</p>
+%%
+%% @see get_ann/1
+%% @see set_ann/2
+
+add_ann(Terms, Node) ->
+ set_ann(Node, Terms ++ get_ann(Node)).
+
+
+%% @spec copy_ann(Source::cerl(), Target::cerl()) -> cerl()
+%%
+%% @doc Copies the list of user annotations from <code>Source</code>
+%% to <code>Target</code>.
+%%
+%% <p>Note: this is equivalent to <code>set_ann(Target,
+%% get_ann(Source))</code>, but potentially more efficient.</p>
+%%
+%% @see get_ann/1
+%% @see set_ann/2
+
+copy_ann(Source, Target) ->
+ set_ann(Target, get_ann(Source)).
+
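+%% A minimal usage sketch for the annotation functions above (module
+%% name `cerl' assumed, as in the earlier sketch):
+%%
+%%   N0 = cerl:c_atom(foo),                        % annotation list is []
+%%   N1 = cerl:set_ann(N0, [{line, 42}]),
+%%   [{line, 42}] = cerl:get_ann(N1),
+%%   N2 = cerl:add_ann([compiler_generated], N1),  % added terms go in front
+%%   [compiler_generated, {line, 42}] = cerl:get_ann(N2),
+%%   [compiler_generated, {line, 42}] = cerl:get_ann(cerl:copy_ann(N2, N0)).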
+
+%% @spec abstract(Term::term()) -> cerl()
+%%
+%% @doc Creates a syntax tree corresponding to an Erlang term.
+%% <code>Term</code> must be a literal term, i.e., one that can be
+%% represented as a source code literal. Thus, it may not contain a
+%% process identifier, port, reference, binary or function value as a
+%% subterm.
+%%
+%% <p>Note: This is a constant time operation.</p>
+%%
+%% @see ann_abstract/2
+%% @see concrete/1
+%% @see is_literal/1
+%% @see is_literal_term/1
+
+abstract(T) ->
+ #literal{val = T}.
+
+
+%% @spec ann_abstract(Annotations::[term()], Term::term()) -> cerl()
+%% @see abstract/1
+
+ann_abstract(As, T) ->
+ #literal{val = T, ann = As}.
+
+
+%% @spec is_literal_term(Term::term()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Term</code> can be
+%% represented as a literal, otherwise <code>false</code>. This
+%% function takes time proportional to the size of <code>Term</code>.
+%%
+%% @see abstract/1
+
+is_literal_term(T) when integer(T) -> true;
+is_literal_term(T) when float(T) -> true;
+is_literal_term(T) when atom(T) -> true;
+is_literal_term([]) -> true;
+is_literal_term([H | T]) ->
+ case is_literal_term(H) of
+ true ->
+ is_literal_term(T);
+ false ->
+ false
+ end;
+is_literal_term(T) when tuple(T) ->
+ is_literal_term_list(tuple_to_list(T));
+is_literal_term(_) ->
+ false.
+
+is_literal_term_list([T | Ts]) ->
+ case is_literal_term(T) of
+ true ->
+ is_literal_term_list(Ts);
+ false ->
+ false
+ end;
+is_literal_term_list([]) ->
+ true.
+
+
+%% @spec concrete(Node::cerl()) -> term()
+%%
+%% @doc Returns the Erlang term represented by a syntax tree. An
+%% exception is thrown if <code>Node</code> does not represent a
+%% literal term.
+%%
+%% <p>Note: This is a constant time operation.</p>
+%%
+%% @see abstract/1
+%% @see is_literal/1
+
+%% Because the normal tuple and list constructor operations always
+%% return a literal if the arguments are literals, 'concrete' and
+%% 'is_literal' never need to traverse the structure.
+
+concrete(#literal{val = V}) ->
+ V.
+
+
+%% @spec is_literal(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% literal term, otherwise <code>false</code>. This function returns
+%% <code>true</code> if and only if the value of
+%% <code>concrete(Node)</code> is defined.
+%%
+%% <p>Note: This is a constant time operation.</p>
+%%
+%% @see abstract/1
+%% @see concrete/1
+%% @see fold_literal/1
+
+is_literal(#literal{}) ->
+ true;
+is_literal(_) ->
+ false.
+
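+%% Usage sketch for the literal conversions above (module name `cerl'
+%% assumed):
+%%
+%%   true = cerl:is_literal_term({foo, [bar, 3.14]}),
+%%   Node = cerl:abstract({foo, [bar, 3.14]}),
+%%   true = cerl:is_literal(Node),
+%%   {foo, [bar, 3.14]} = cerl:concrete(Node).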
+
+%% @spec fold_literal(Node::cerl()) -> cerl()
+%%
+%% @doc Assures that literals have a compact representation. This is
+%% occasionally useful if <code>c_cons_skel/2</code>,
+%% <code>c_tuple_skel/1</code> or <code>unfold_literal/1</code> were
+%% used in the construction of <code>Node</code>, and you want to revert
+%% to the normal "folded" representation of literals. If
+%% <code>Node</code> represents a tuple or list constructor, its
+%% elements are rewritten recursively, and the node is reconstructed
+%% using <code>c_cons/2</code> or <code>c_tuple/1</code>, respectively;
+%% otherwise, <code>Node</code> is not changed.
+%%
+%% @see is_literal/1
+%% @see c_cons_skel/2
+%% @see c_tuple_skel/1
+%% @see c_cons/2
+%% @see c_tuple/1
+%% @see unfold_literal/1
+
+fold_literal(Node) ->
+ case type(Node) of
+ tuple ->
+ update_c_tuple(Node, fold_literal_list(tuple_es(Node)));
+ cons ->
+ update_c_cons(Node, fold_literal(cons_hd(Node)),
+ fold_literal(cons_tl(Node)));
+ _ ->
+ Node
+ end.
+
+fold_literal_list([E | Es]) ->
+ [fold_literal(E) | fold_literal_list(Es)];
+fold_literal_list([]) ->
+ [].
+
+
+%% @spec unfold_literal(Node::cerl()) -> cerl()
+%%
+%% @doc Assures that literals have a fully expanded representation. If
+%% <code>Node</code> represents a literal tuple or list constructor, its
+%% elements are rewritten recursively, and the node is reconstructed
+%% using <code>c_cons_skel/2</code> or <code>c_tuple_skel/1</code>,
+%% respectively; otherwise, <code>Node</code> is not changed. The
+%% function {@link fold_literal/1} can be used to revert to the normal
+%% compact representation.
+%%
+%% @see is_literal/1
+%% @see c_cons_skel/2
+%% @see c_tuple_skel/1
+%% @see c_cons/2
+%% @see c_tuple/1
+%% @see fold_literal/1
+
+unfold_literal(Node) ->
+ case type(Node) of
+ literal ->
+ copy_ann(Node, unfold_concrete(concrete(Node)));
+ _ ->
+ Node
+ end.
+
+unfold_concrete(Val) ->
+ case Val of
+ _ when tuple(Val) ->
+ c_tuple_skel(unfold_concrete_list(tuple_to_list(Val)));
+ [H|T] ->
+ c_cons_skel(unfold_concrete(H), unfold_concrete(T));
+ _ ->
+ abstract(Val)
+ end.
+
+unfold_concrete_list([E | Es]) ->
+ [unfold_concrete(E) | unfold_concrete_list(Es)];
+unfold_concrete_list([]) ->
+ [].
+
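+%% Usage sketch for fold_literal/1 and unfold_literal/1 (module name
+%% `cerl' assumed):
+%%
+%%   L = cerl:abstract([1, 2]),          % compact ("folded") literal
+%%   U = cerl:unfold_literal(L),
+%%   cons = cerl:type(U),                % skeleton with literal subnodes
+%%   false = cerl:is_literal(U),
+%%   F = cerl:fold_literal(U),
+%%   [1, 2] = cerl:concrete(F).          % back to a single literal node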
+
+%% ---------------------------------------------------------------------
+
+-record(module, {ann = [], name, exports, attrs, defs}).
+
+
+%% @spec c_module(Name::cerl(), Exports, Definitions) -> cerl()
+%%
+%% Exports = [cerl()]
+%% Definitions = [{cerl(), cerl()}]
+%%
+%% @equiv c_module(Name, Exports, [], Definitions)
+
+c_module(Name, Exports, Es) ->
+ #module{name = Name, exports = Exports, attrs = [], defs = Es}.
+
+
+%% @spec c_module(Name::cerl(), Exports, Attributes, Definitions) ->
+%% cerl()
+%%
+%% Exports = [cerl()]
+%% Attributes = [{cerl(), cerl()}]
+%% Definitions = [{cerl(), cerl()}]
+%%
+%% @doc Creates an abstract module definition. The result represents
+%% <pre>
+%% module <em>Name</em> [<em>E1</em>, ..., <em>Ek</em>]
+%% attributes [<em>K1</em> = <em>T1</em>, ...,
+%% <em>Km</em> = <em>Tm</em>]
+%% <em>V1</em> = <em>F1</em>
+%% ...
+%% <em>Vn</em> = <em>Fn</em>
+%% end</pre>
+%%
+%% if <code>Exports</code> = <code>[E1, ..., Ek]</code>,
+%% <code>Attributes</code> = <code>[{K1, T1}, ..., {Km, Tm}]</code>,
+%% and <code>Definitions</code> = <code>[{V1, F1}, ..., {Vn,
+%% Fn}]</code>.
+%%
+%% <p><code>Name</code> and all the <code>Ki</code> must be atom
+%% literals, and all the <code>Ti</code> must be constant literals. All
+%% the <code>Vi</code> and <code>Ei</code> must have type
+%% <code>var</code> and represent function names. All the
+%% <code>Fi</code> must have type <code>'fun'</code>.</p>
+%%
+%% @see c_module/3
+%% @see module_name/1
+%% @see module_exports/1
+%% @see module_attrs/1
+%% @see module_defs/1
+%% @see module_vars/1
+%% @see ann_c_module/4
+%% @see ann_c_module/5
+%% @see update_c_module/5
+%% @see c_atom/1
+%% @see c_var/1
+%% @see c_fun/2
+%% @see is_literal/1
+
+c_module(Name, Exports, Attrs, Es) ->
+ #module{name = Name, exports = Exports, attrs = Attrs, defs = Es}.
+
+
+%% @spec ann_c_module(As::[term()], Name::cerl(), Exports,
+%% Definitions) -> cerl()
+%%
+%% Exports = [cerl()]
+%% Definitions = [{cerl(), cerl()}]
+%%
+%% @see c_module/3
+%% @see ann_c_module/5
+
+ann_c_module(As, Name, Exports, Es) ->
+ #module{name = Name, exports = Exports, attrs = [], defs = Es,
+ ann = As}.
+
+
+%% @spec ann_c_module(As::[term()], Name::cerl(), Exports,
+%% Attributes, Definitions) -> cerl()
+%%
+%% Exports = [cerl()]
+%% Attributes = [{cerl(), cerl()}]
+%% Definitions = [{cerl(), cerl()}]
+%%
+%% @see c_module/4
+%% @see ann_c_module/4
+
+ann_c_module(As, Name, Exports, Attrs, Es) ->
+ #module{name = Name, exports = Exports, attrs = Attrs, defs = Es,
+ ann = As}.
+
+
+%% @spec update_c_module(Old::cerl(), Name::cerl(), Exports,
+%% Attributes, Definitions) -> cerl()
+%%
+%% Exports = [cerl()]
+%% Attributes = [{cerl(), cerl()}]
+%% Definitions = [{cerl(), cerl()}]
+%%
+%% @see c_module/4
+
+update_c_module(Node, Name, Exports, Attrs, Es) ->
+ #module{name = Name, exports = Exports, attrs = Attrs, defs = Es,
+ ann = get_ann(Node)}.
+
+
+%% @spec is_c_module(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% module definition, otherwise <code>false</code>.
+%%
+%% @see type/1
+
+is_c_module(#module{}) ->
+ true;
+is_c_module(_) ->
+ false.
+
+
+%% @spec module_name(Node::cerl()) -> cerl()
+%%
+%% @doc Returns the name subtree of an abstract module definition.
+%%
+%% @see c_module/4
+
+module_name(Node) ->
+ Node#module.name.
+
+
+%% @spec module_exports(Node::cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of exports subtrees of an abstract module
+%% definition.
+%%
+%% @see c_module/4
+
+module_exports(Node) ->
+ Node#module.exports.
+
+
+%% @spec module_attrs(Node::cerl()) -> [{cerl(), cerl()}]
+%%
+%% @doc Returns the list of pairs of attribute key/value subtrees of
+%% an abstract module definition.
+%%
+%% @see c_module/4
+
+module_attrs(Node) ->
+ Node#module.attrs.
+
+
+%% @spec module_defs(Node::cerl()) -> [{cerl(), cerl()}]
+%%
+%% @doc Returns the list of function definitions of an abstract module
+%% definition.
+%%
+%% @see c_module/4
+
+module_defs(Node) ->
+ Node#module.defs.
+
+
+%% @spec module_vars(Node::cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of left-hand side function variable subtrees
+%% of an abstract module definition.
+%%
+%% @see c_module/4
+
+module_vars(Node) ->
+ [F || {F, _} <- module_defs(Node)].
+
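+%% Usage sketch for the module constructors and accessors above (module
+%% name `cerl' assumed):
+%%
+%%   F = cerl:c_fname(f, 0),
+%%   Fun = cerl:c_fun([], cerl:c_atom(ok)),
+%%   M = cerl:c_module(cerl:c_atom(m), [F], [{F, Fun}]),
+%%   true = cerl:is_c_module(M),
+%%   [F] = cerl:module_exports(M),
+%%   [F] = cerl:module_vars(M),
+%%   [] = cerl:module_attrs(M).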
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_int(Value::integer()) -> cerl()
+%%
+%%
+%% @doc Creates an abstract integer literal. The lexical
+%% representation is the canonical decimal numeral of
+%% <code>Value</code>.
+%%
+%% @see ann_c_int/2
+%% @see is_c_int/1
+%% @see int_val/1
+%% @see int_lit/1
+%% @see c_char/1
+
+c_int(Value) ->
+ #literal{val = Value}.
+
+
+%% @spec ann_c_int(As::[term()], Value::integer()) -> cerl()
+%% @see c_int/1
+
+ann_c_int(As, Value) ->
+ #literal{val = Value, ann = As}.
+
+
+%% @spec is_c_int(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents an
+%% integer literal, otherwise <code>false</code>.
+%% @see c_int/1
+
+is_c_int(#literal{val = V}) when integer(V) ->
+ true;
+is_c_int(_) ->
+ false.
+
+
+%% @spec int_val(cerl()) -> integer()
+%%
+%% @doc Returns the value represented by an integer literal node.
+%% @see c_int/1
+
+int_val(Node) ->
+ Node#literal.val.
+
+
+%% @spec int_lit(cerl()) -> string()
+%%
+%% @doc Returns the numeral string represented by an integer literal
+%% node.
+%% @see c_int/1
+
+int_lit(Node) ->
+ integer_to_list(int_val(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_float(Value::float()) -> cerl()
+%%
+%% @doc Creates an abstract floating-point literal. The lexical
+%% representation is the decimal floating-point numeral of
+%% <code>Value</code>.
+%%
+%% @see ann_c_float/2
+%% @see is_c_float/1
+%% @see float_val/1
+%% @see float_lit/1
+
+%% Note that not all floating-point numerals can be represented with
+%% full precision.
+
+c_float(Value) ->
+ #literal{val = Value}.
+
+
+%% @spec ann_c_float(As::[term()], Value::float()) -> cerl()
+%% @see c_float/1
+
+ann_c_float(As, Value) ->
+ #literal{val = Value, ann = As}.
+
+
+%% @spec is_c_float(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% floating-point literal, otherwise <code>false</code>.
+%% @see c_float/1
+
+is_c_float(#literal{val = V}) when float(V) ->
+ true;
+is_c_float(_) ->
+ false.
+
+
+%% @spec float_val(cerl()) -> float()
+%%
+%% @doc Returns the value represented by a floating-point literal
+%% node.
+%% @see c_float/1
+
+float_val(Node) ->
+ Node#literal.val.
+
+
+%% @spec float_lit(cerl()) -> string()
+%%
+%% @doc Returns the numeral string represented by a floating-point
+%% literal node.
+%% @see c_float/1
+
+float_lit(Node) ->
+ float_to_list(float_val(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_atom(Name) -> cerl()
+%% Name = atom() | string()
+%%
+%% @doc Creates an abstract atom literal. The print name of the atom
+%% is the character sequence represented by <code>Name</code>.
+%%
+%% <p>Note: passing a string as argument to this function causes a
+%% corresponding atom to be created for the internal representation.</p>
+%%
+%% @see ann_c_atom/2
+%% @see is_c_atom/1
+%% @see atom_val/1
+%% @see atom_name/1
+%% @see atom_lit/1
+
+c_atom(Name) when atom(Name) ->
+ #literal{val = Name};
+c_atom(Name) ->
+ #literal{val = list_to_atom(Name)}.
+
+
+%% @spec ann_c_atom(As::[term()], Name) -> cerl()
+%% Name = atom() | string()
+%% @see c_atom/1
+
+ann_c_atom(As, Name) when atom(Name) ->
+ #literal{val = Name, ann = As};
+ann_c_atom(As, Name) ->
+ #literal{val = list_to_atom(Name), ann = As}.
+
+
+%% @spec is_c_atom(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents an
+%% atom literal, otherwise <code>false</code>.
+%%
+%% @see c_atom/1
+
+is_c_atom(#literal{val = V}) when atom(V) ->
+ true;
+is_c_atom(_) ->
+ false.
+
+%% @spec atom_val(cerl())-> atom()
+%%
+%% @doc Returns the value represented by an abstract atom.
+%%
+%% @see c_atom/1
+
+atom_val(Node) ->
+ Node#literal.val.
+
+
+%% @spec atom_name(cerl()) -> string()
+%%
+%% @doc Returns the print name of an abstract atom.
+%%
+%% @see c_atom/1
+
+atom_name(Node) ->
+ atom_to_list(atom_val(Node)).
+
+
+%% @spec atom_lit(cerl()) -> string()
+%%
+%% @doc Returns the literal string represented by an abstract
+%% atom. This always includes surrounding single-quote characters.
+%%
+%% <p>Note that an abstract atom may have several literal
+%% representations, and that the representation yielded by this
+%% function is not fixed; e.g.,
+%% <code>atom_lit(c_atom("a\012b"))</code> could yield the string
+%% <code>"\'a\\nb\'"</code>.</p>
+%%
+%% @see c_atom/1
+
+%% TODO: replace the use of the unofficial 'write_string/2'.
+
+atom_lit(Node) ->
+ io_lib:write_string(atom_name(Node), $'). %' stupid Emacs.
+
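+%% Usage sketch for the atom functions above (module name `cerl'
+%% assumed; atom_lit/1 may return a deep character list, so it is
+%% flattened here before comparison):
+%%
+%%   A = cerl:c_atom("hello world"),
+%%   'hello world' = cerl:atom_val(A),
+%%   "hello world" = cerl:atom_name(A),
+%%   "'hello world'" = lists:flatten(cerl:atom_lit(A)).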
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_char(Value) -> cerl()
+%%
+%% Value = char() | integer()
+%%
+%% @doc Creates an abstract character literal. If the local
+%% implementation of Erlang defines <code>char()</code> as a subset of
+%% <code>integer()</code>, this function is equivalent to
+%% <code>c_int/1</code>. Otherwise, if the given value is an integer,
+%% it will be converted to the character with the corresponding
+%% code. The lexical representation of a character is
+%% "<code>$<em>Char</em></code>", where <code>Char</code> is a single
+%% printing character or an escape sequence.
+%%
+%% @see c_int/1
+%% @see c_string/1
+%% @see ann_c_char/2
+%% @see is_c_char/1
+%% @see char_val/1
+%% @see char_lit/1
+%% @see is_print_char/1
+
+c_char(Value) when integer(Value), Value >= 0 ->
+ #literal{val = Value}.
+
+
+%% @spec ann_c_char(As::[term()], Value::char()) -> cerl()
+%% @see c_char/1
+
+ann_c_char(As, Value) ->
+ #literal{val = Value, ann = As}.
+
+
+%% @spec is_c_char(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% character literal, otherwise <code>false</code>.
+%%
+%% <p>If the local implementation of Erlang defines
+%% <code>char()</code> as a subset of <code>integer()</code>, then
+%% <code>is_c_int(<em>Node</em>)</code> will also yield
+%% <code>true</code>.</p>
+%%
+%% @see c_char/1
+%% @see is_print_char/1
+
+is_c_char(#literal{val = V}) when integer(V), V >= 0 ->
+ is_char_value(V);
+is_c_char(_) ->
+ false.
+
+
+%% @spec is_print_char(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% "printing" character, otherwise <code>false</code>. (Cf.
+%% <code>is_c_char/1</code>.) A "printing" character has either a
+%% given graphical representation, or a "named" escape sequence such
+%% as "<code>\n</code>". Currently, only ISO 8859-1 (Latin-1)
+%% character values are recognized.
+%%
+%% @see c_char/1
+%% @see is_c_char/1
+
+is_print_char(#literal{val = V}) when integer(V), V >= 0 ->
+ is_print_char_value(V);
+is_print_char(_) ->
+ false.
+
+
+%% @spec char_val(cerl()) -> char()
+%%
+%% @doc Returns the value represented by an abstract character literal.
+%%
+%% @see c_char/1
+
+char_val(Node) ->
+ Node#literal.val.
+
+
+%% @spec char_lit(cerl()) -> string()
+%%
+%% @doc Returns the literal string represented by an abstract
+%% character. This includes a leading <code>$</code>
+%% character. Currently, all characters that are not in the set of ISO
+%% 8859-1 (Latin-1) "printing" characters will be escaped.
+%%
+%% @see c_char/1
+
+char_lit(Node) ->
+ io_lib:write_char(char_val(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_string(Value::string()) -> cerl()
+%%
+%% @doc Creates an abstract string literal. Equivalent to creating an
+%% abstract list of the corresponding character literals
+%% (cf. <code>is_c_string/1</code>), but is typically more
+%% efficient. The lexical representation of a string is
+%% "<code>"<em>Chars</em>"</code>", where <code>Chars</code> is a
+%% sequence of printing characters or spaces.
+%%
+%% @see c_char/1
+%% @see ann_c_string/2
+%% @see is_c_string/1
+%% @see string_val/1
+%% @see string_lit/1
+%% @see is_print_string/1
+
+c_string(Value) ->
+ #literal{val = Value}.
+
+
+%% @spec ann_c_string(As::[term()], Value::string()) -> cerl()
+%% @see c_string/1
+
+ann_c_string(As, Value) ->
+ #literal{val = Value, ann = As}.
+
+
+%% @spec is_c_string(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% string literal, otherwise <code>false</code>. Strings are defined
+%% as lists of characters; see <code>is_c_char/1</code> for details.
+%%
+%% @see c_string/1
+%% @see is_c_char/1
+%% @see is_print_string/1
+
+is_c_string(#literal{val = V}) ->
+ is_char_list(V);
+is_c_string(_) ->
+ false.
+
+
+%% @spec is_print_string(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% string literal containing only "printing" characters, otherwise
+%% <code>false</code>. See <code>is_c_string/1</code> and
+%% <code>is_print_char/1</code> for details. Currently, only ISO
+%% 8859-1 (Latin-1) character values are recognized.
+%%
+%% @see c_string/1
+%% @see is_c_string/1
+%% @see is_print_char/1
+
+is_print_string(#literal{val = V}) ->
+ is_print_char_list(V);
+is_print_string(_) ->
+ false.
+
+
+%% @spec string_val(cerl()) -> string()
+%%
+%% @doc Returns the value represented by an abstract string literal.
+%%
+%% @see c_string/1
+
+string_val(Node) ->
+ Node#literal.val.
+
+
+%% @spec string_lit(cerl()) -> string()
+%%
+%% @doc Returns the literal string represented by an abstract string.
+%% This includes surrounding double-quote characters
+%% <code>"..."</code>. Currently, characters that are not in the set
+%% of ISO 8859-1 (Latin-1) "printing" characters will be escaped,
+%% except for spaces.
+%%
+%% @see c_string/1
+
+string_lit(Node) ->
+ io_lib:write_string(string_val(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_nil() -> cerl()
+%%
+%% @doc Creates an abstract empty list. The result represents
+%% "<code>[]</code>". The empty list is traditionally called "nil".
+%%
+%% @see ann_c_nil/1
+%% @see is_c_list/1
+%% @see c_cons/2
+
+c_nil() ->
+ #literal{val = []}.
+
+
+%% @spec ann_c_nil(As::[term()]) -> cerl()
+%% @see c_nil/0
+
+ann_c_nil(As) ->
+ #literal{val = [], ann = As}.
+
+
+%% @spec is_c_nil(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% empty list, otherwise <code>false</code>.
+
+is_c_nil(#literal{val = []}) ->
+ true;
+is_c_nil(_) ->
+ false.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_cons(Head::cerl(), Tail::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract list constructor. The result represents
+%% "<code>[<em>Head</em> | <em>Tail</em>]</code>". Note that if both
+%% <code>Head</code> and <code>Tail</code> have type
+%% <code>literal</code>, then the result will also have type
+%% <code>literal</code>, and annotations on <code>Head</code> and
+%% <code>Tail</code> are lost.
+%%
+%% <p>Recall that in Erlang, the tail element of a list constructor is
+%% not necessarily a list.</p>
+%%
+%% @see ann_c_cons/3
+%% @see update_c_cons/3
+%% @see c_cons_skel/2
+%% @see is_c_cons/1
+%% @see cons_hd/1
+%% @see cons_tl/1
+%% @see is_c_list/1
+%% @see c_nil/0
+%% @see list_elements/1
+%% @see list_length/1
+%% @see make_list/2
+
+-record(cons, {ann = [], hd, tl}).
+
+%% *Always* collapse literals.
+
+c_cons(#literal{val = Head}, #literal{val = Tail}) ->
+ #literal{val = [Head | Tail]};
+c_cons(Head, Tail) ->
+ #cons{hd = Head, tl = Tail}.
+
+
+%% @spec ann_c_cons(As::[term()], Head::cerl(), Tail::cerl()) -> cerl()
+%% @see c_cons/2
+
+ann_c_cons(As, #literal{val = Head}, #literal{val = Tail}) ->
+ #literal{val = [Head | Tail], ann = As};
+ann_c_cons(As, Head, Tail) ->
+ #cons{hd = Head, tl = Tail, ann = As}.
+
+
+%% @spec update_c_cons(Old::cerl(), Head::cerl(), Tail::cerl()) ->
+%% cerl()
+%% @see c_cons/2
+
+update_c_cons(Node, #literal{val = Head}, #literal{val = Tail}) ->
+ #literal{val = [Head | Tail], ann = get_ann(Node)};
+update_c_cons(Node, Head, Tail) ->
+ #cons{hd = Head, tl = Tail, ann = get_ann(Node)}.
+
+
+%% @spec c_cons_skel(Head::cerl(), Tail::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract list constructor skeleton. Does not fold
+%% constant literals, i.e., the result always has type
+%% <code>cons</code>, representing "<code>[<em>Head</em> |
+%% <em>Tail</em>]</code>".
+%%
+%% <p>This function is occasionally useful when it is necessary to have
+%% annotations on the subnodes of a list constructor node, even when the
+%% subnodes are constant literals. Note however that
+%% <code>is_literal/1</code> will yield <code>false</code> and
+%% <code>concrete/1</code> will fail if passed the result from this
+%% function.</p>
+%%
+%% <p><code>fold_literal/1</code> can be used to revert a node to the
+%% normal-form representation.</p>
+%%
+%% @see ann_c_cons_skel/3
+%% @see update_c_cons_skel/3
+%% @see c_cons/2
+%% @see is_c_cons/1
+%% @see is_c_list/1
+%% @see c_nil/0
+%% @see is_literal/1
+%% @see fold_literal/1
+%% @see concrete/1
+
+%% *Never* collapse literals.
+
+c_cons_skel(Head, Tail) ->
+ #cons{hd = Head, tl = Tail}.
+
+
+%% @spec ann_c_cons_skel(As::[term()], Head::cerl(), Tail::cerl()) ->
+%% cerl()
+%% @see c_cons_skel/2
+
+ann_c_cons_skel(As, Head, Tail) ->
+ #cons{hd = Head, tl = Tail, ann = As}.
+
+
+%% @spec update_c_cons_skel(Old::cerl(), Head::cerl(), Tail::cerl()) ->
+%% cerl()
+%% @see c_cons_skel/2
+
+update_c_cons_skel(Node, Head, Tail) ->
+ #cons{hd = Head, tl = Tail, ann = get_ann(Node)}.
+
+
+%% @spec is_c_cons(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% list constructor, otherwise <code>false</code>.
+
+is_c_cons(#cons{}) ->
+ true;
+is_c_cons(#literal{val = [_ | _]}) ->
+ true;
+is_c_cons(_) ->
+ false.
+
+
+%% @spec cons_hd(cerl()) -> cerl()
+%%
+%% @doc Returns the head subtree of an abstract list constructor.
+%%
+%% @see c_cons/2
+
+cons_hd(#cons{hd = Head}) ->
+ Head;
+cons_hd(#literal{val = [Head | _]}) ->
+ #literal{val = Head}.
+
+
+%% @spec cons_tl(cerl()) -> cerl()
+%%
+%% @doc Returns the tail subtree of an abstract list constructor.
+%%
+%% <p>Recall that the tail does not necessarily represent a proper
+%% list.</p>
+%%
+%% @see c_cons/2
+
+cons_tl(#cons{tl = Tail}) ->
+ Tail;
+cons_tl(#literal{val = [_ | Tail]}) ->
+ #literal{val = Tail}.
+
+
+%% @spec is_c_list(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% proper list, otherwise <code>false</code>. A proper list is either
+%% the empty list <code>[]</code>, or a cons cell <code>[<em>Head</em> |
+%% <em>Tail</em>]</code>, where recursively <code>Tail</code> is a
+%% proper list.
+%%
+%% <p>Note: Because <code>Node</code> is a syntax tree, the actual
+%% run-time values corresponding to its subtrees may often be partially
+%% or completely unknown. Thus, if <code>Node</code> represents e.g.
+%% "<code>[... | Ns]</code>" (where <code>Ns</code> is a variable), then
+%% the function will return <code>false</code>, because it is not known
+%% whether <code>Ns</code> will be bound to a list at run-time. If
+%% <code>Node</code> instead represents e.g. "<code>[1, 2, 3]</code>" or
+%% "<code>[A | []]</code>", then the function will return
+%% <code>true</code>.</p>
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see list_elements/1
+%% @see list_length/1
+
+is_c_list(#cons{tl = Tail}) ->
+ is_c_list(Tail);
+is_c_list(#literal{val = V}) ->
+ is_proper_list(V);
+is_c_list(_) ->
+ false.
+
+is_proper_list([_ | Tail]) ->
+ is_proper_list(Tail);
+is_proper_list([]) ->
+ true;
+is_proper_list(_) ->
+ false.
+
+%% @spec list_elements(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of element subtrees of an abstract list.
+%% <code>Node</code> must represent a proper list. E.g., if
+%% <code>Node</code> represents "<code>[<em>X1</em>, <em>X2</em> |
+%% [<em>X3</em>, <em>X4</em> | []]</code>", then
+%% <code>list_elements(Node)</code> yields the list <code>[X1, X2, X3,
+%% X4]</code>.
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see is_c_list/1
+%% @see list_length/1
+%% @see make_list/2
+
+list_elements(#cons{hd = Head, tl = Tail}) ->
+ [Head | list_elements(Tail)];
+list_elements(#literal{val = V}) ->
+ abstract_list(V).
+
+abstract_list([X | Xs]) ->
+ [abstract(X) | abstract_list(Xs)];
+abstract_list([]) ->
+ [].
+
+
+%% @spec list_length(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of element subtrees of an abstract list.
+%% <code>Node</code> must represent a proper list. E.g., if
+%% <code>Node</code> represents "<code>[X1 | [X2, X3 | [X4, X5,
+%% X6]]]</code>", then <code>list_length(Node)</code> returns the
+%% integer 6.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(list_elements(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see is_c_list/1
+%% @see list_elements/1
+
+list_length(L) ->
+ list_length(L, 0).
+
+list_length(#cons{tl = Tail}, A) ->
+ list_length(Tail, A + 1);
+list_length(#literal{val = V}, A) ->
+ A + length(V).
+
+
+%% @spec make_list(List) -> Node
+%% @equiv make_list(List, none)
+
+make_list(List) ->
+ ann_make_list([], List).
+
+
+%% @spec make_list(List::[cerl()], Tail) -> cerl()
+%%
+%% Tail = cerl() | none
+%%
+%% @doc Creates an abstract list from the elements in <code>List</code>
+%% and the optional <code>Tail</code>. If <code>Tail</code> is
+%% <code>none</code>, the result will represent a nil-terminated list,
+%% otherwise it represents "<code>[... | <em>Tail</em>]</code>".
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see ann_make_list/3
+%% @see update_list/3
+%% @see list_elements/1
+
+make_list(List, Tail) ->
+ ann_make_list([], List, Tail).
+
+
+%% @spec update_list(Old::cerl(), List::[cerl()]) -> cerl()
+%% @equiv update_list(Old, List, none)
+
+update_list(Node, List) ->
+ ann_make_list(get_ann(Node), List).
+
+
+%% @spec update_list(Old::cerl(), List::[cerl()], Tail) -> cerl()
+%%
+%% Tail = cerl() | none
+%%
+%% @see make_list/2
+%% @see update_list/2
+
+update_list(Node, List, Tail) ->
+ ann_make_list(get_ann(Node), List, Tail).
+
+
+%% @spec ann_make_list(As::[term()], List::[cerl()]) -> cerl()
+%% @equiv ann_make_list(As, List, none)
+
+ann_make_list(As, List) ->
+ ann_make_list(As, List, none).
+
+
+%% @spec ann_make_list(As::[term()], List::[cerl()], Tail) -> cerl()
+%%
+%% Tail = cerl() | none
+%%
+%% @see make_list/2
+%% @see ann_make_list/2
+
+ann_make_list(As, [H | T], Tail) ->
+ ann_c_cons(As, H, make_list(T, Tail)); % `c_cons' folds literals
+ann_make_list(As, [], none) ->
+ ann_c_nil(As);
+ann_make_list(_, [], Node) ->
+ Node.
+
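+%% Usage sketch for the list constructors and accessors above (module
+%% name `cerl' assumed):
+%%
+%%   L = cerl:make_list([cerl:c_int(1), cerl:c_int(2)]),
+%%   true = cerl:is_literal(L),          % literal elements were folded
+%%   true = cerl:is_c_list(L),
+%%   2 = cerl:list_length(L),
+%%   [1, 2] = [cerl:concrete(E) || E <- cerl:list_elements(L)].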
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_tuple(Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract tuple. If <code>Elements</code> is
+%% <code>[E1, ..., En]</code>, the result represents
+%% "<code>{<em>E1</em>, ..., <em>En</em>}</code>". Note that if all
+%% nodes in <code>Elements</code> have type <code>literal</code>, or if
+%% <code>Elements</code> is empty, then the result will also have type
+%% <code>literal</code> and annotations on nodes in
+%% <code>Elements</code> are lost.
+%%
+%% <p>Recall that Erlang has distinct 1-tuples, i.e., <code>{X}</code>
+%% is always distinct from <code>X</code> itself.</p>
+%%
+%% @see ann_c_tuple/2
+%% @see update_c_tuple/2
+%% @see is_c_tuple/1
+%% @see tuple_es/1
+%% @see tuple_arity/1
+%% @see c_tuple_skel/1
+
+-record(tuple, {ann = [], es}).
+
+%% *Always* collapse literals.
+
+c_tuple(Es) ->
+ case is_lit_list(Es) of
+ false ->
+ #tuple{es = Es};
+ true ->
+ #literal{val = list_to_tuple(lit_list_vals(Es))}
+ end.
+
+
+%% @spec ann_c_tuple(As::[term()], Elements::[cerl()]) -> cerl()
+%% @see c_tuple/1
+
+ann_c_tuple(As, Es) ->
+ case is_lit_list(Es) of
+ false ->
+ #tuple{es = Es, ann = As};
+ true ->
+ #literal{val = list_to_tuple(lit_list_vals(Es)), ann = As}
+ end.
+
+
+%% @spec update_c_tuple(Old::cerl(), Elements::[cerl()]) -> cerl()
+%% @see c_tuple/1
+
+update_c_tuple(Node, Es) ->
+ case is_lit_list(Es) of
+ false ->
+ #tuple{es = Es, ann = get_ann(Node)};
+ true ->
+ #literal{val = list_to_tuple(lit_list_vals(Es)),
+ ann = get_ann(Node)}
+ end.
+
+
+%% @spec c_tuple_skel(Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract tuple skeleton. Does not fold constant
+%% literals, i.e., the result always has type <code>tuple</code>,
+%% representing "<code>{<em>E1</em>, ..., <em>En</em>}</code>", if
+%% <code>Elements</code> is <code>[E1, ..., En]</code>.
+%%
+%% <p>This function is occasionally useful when it is necessary to have
+%% annotations on the subnodes of a tuple node, even when all the
+%% subnodes are constant literals. Note however that
+%% <code>is_literal/1</code> will yield <code>false</code> and
+%% <code>concrete/1</code> will fail if passed the result from this
+%% function.</p>
+%%
+%% <p><code>fold_literal/1</code> can be used to revert a node to the
+%% normal-form representation.</p>
+%%
+%% @see ann_c_tuple_skel/2
+%% @see update_c_tuple_skel/2
+%% @see c_tuple/1
+%% @see tuple_es/1
+%% @see is_c_tuple/1
+%% @see is_literal/1
+%% @see fold_literal/1
+%% @see concrete/1
+
+%% *Never* collapse literals.
+
+c_tuple_skel(Es) ->
+ #tuple{es = Es}.
+
+
+%% @spec ann_c_tuple_skel(As::[term()], Elements::[cerl()]) -> cerl()
+%% @see c_tuple_skel/1
+
+ann_c_tuple_skel(As, Es) ->
+ #tuple{es = Es, ann = As}.
+
+
+%% @spec update_c_tuple_skel(Old::cerl(), Elements::[cerl()]) -> cerl()
+%% @see c_tuple_skel/1
+
+update_c_tuple_skel(Old, Es) ->
+ #tuple{es = Es, ann = get_ann(Old)}.
+
+
+%% @spec is_c_tuple(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% tuple, otherwise <code>false</code>.
+%%
+%% @see c_tuple/1
+
+is_c_tuple(#tuple{}) ->
+ true;
+is_c_tuple(#literal{val = V}) when tuple(V) ->
+ true;
+is_c_tuple(_) ->
+ false.
+
+
+%% @spec tuple_es(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of element subtrees of an abstract tuple.
+%%
+%% @see c_tuple/1
+
+tuple_es(#tuple{es = Es}) ->
+ Es;
+tuple_es(#literal{val = V}) ->
+ make_lit_list(tuple_to_list(V)).
+
+
+%% @spec tuple_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of element subtrees of an abstract tuple.
+%%
+%% <p>Note: this is equivalent to <code>length(tuple_es(Node))</code>,
+%% but potentially more efficient.</p>
+%%
+%% @see tuple_es/1
+%% @see c_tuple/1
+
+tuple_arity(#tuple{es = Es}) ->
+ length(Es);
+tuple_arity(#literal{val = V}) when tuple(V) ->
+ size(V).
+
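+%% Usage sketch contrasting c_tuple/1 with c_tuple_skel/1 (module name
+%% `cerl' assumed):
+%%
+%%   T1 = cerl:c_tuple([cerl:c_atom(ok), cerl:c_int(1)]),
+%%   true = cerl:is_literal(T1),         % folded into the literal {ok, 1}
+%%   {ok, 1} = cerl:concrete(T1),
+%%   T2 = cerl:c_tuple_skel([cerl:c_atom(ok), cerl:c_int(1)]),
+%%   tuple = cerl:type(T2),              % skeletons are never folded
+%%   2 = cerl:tuple_arity(T2).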
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_var(Name::var_name()) -> cerl()
+%%
+%% var_name() = integer() | atom() | {atom(), integer()}
+%%
+%% @doc Creates an abstract variable. A variable is identified by its
+%% name, given by the <code>Name</code> parameter.
+%%
+%% <p>If a name is given by a single atom, it should either be a
+%% "simple" atom which does not need to be single-quoted in Erlang, or
+%% otherwise its print name should correspond to a proper Erlang
+%% variable, i.e., begin with an uppercase character or an
+%% underscore. Names of the form <code>{A, N}</code> represent
+%% function name variables "<code><em>A</em>/<em>N</em></code>"; these
+%% are special variables which may be bound only in the function
+%% definitions of a module or a <code>letrec</code>. They may not be
+%% bound in <code>let</code> expressions and cannot occur in clause
+%% patterns. The atom <code>A</code> in a function name may be any
+%% atom; the integer <code>N</code> must be nonnegative. The functions
+%% <code>c_fname/2</code> etc. are utilities for handling function
+%% name variables.</p>
+%%
+%% <p>When printing variable names, they must have the form of proper
+%% Core Erlang variables and function names. E.g., a name represented
+%% by an integer such as <code>42</code> could be formatted as
+%% "<code>_42</code>", an atom <code>'Xxx'</code> simply as
+%% "<code>Xxx</code>", and an atom <code>foo</code> as
+%% "<code>_foo</code>". However, one must assure that any two valid
+%% distinct names are never mapped to the same strings. Tuples such
+%% as <code>{foo, 2}</code> representing function names can simply by
+%% formatted as "<code>'foo'/2</code>", with no risk of conflicts.</p>
+%%
+%% @see ann_c_var/2
+%% @see update_c_var/2
+%% @see is_c_var/1
+%% @see var_name/1
+%% @see c_fname/2
+%% @see c_module/4
+%% @see c_letrec/2
+
+-record(var, {ann = [], name}).
+
+c_var(Name) ->
+ #var{name = Name}.
+
+
+%% @spec ann_c_var(As::[term()], Name::var_name()) -> cerl()
+%%
+%% @see c_var/1
+
+ann_c_var(As, Name) ->
+ #var{name = Name, ann = As}.
+
+%% @spec update_c_var(Old::cerl(), Name::var_name()) -> cerl()
+%%
+%% @see c_var/1
+
+update_c_var(Node, Name) ->
+ #var{name = Name, ann = get_ann(Node)}.
+
+
+%% @spec is_c_var(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% variable, otherwise <code>false</code>.
+%%
+%% @see c_var/1
+
+is_c_var(#var{}) ->
+ true;
+is_c_var(_) ->
+ false.
+
+
+%% @spec c_fname(Name::atom(), Arity::integer()) -> cerl()
+%% @equiv c_var({Name, Arity})
+%% @see fname_id/1
+%% @see fname_arity/1
+%% @see is_c_fname/1
+%% @see ann_c_fname/3
+%% @see update_c_fname/3
+
+c_fname(Atom, Arity) ->
+ c_var({Atom, Arity}).
+
+
+%% @spec ann_c_fname(As::[term()], Name::atom(), Arity::integer()) ->
+%% cerl()
+%% @equiv ann_c_var(As, {Atom, Arity})
+%% @see c_fname/2
+
+ann_c_fname(As, Atom, Arity) ->
+ ann_c_var(As, {Atom, Arity}).
+
+
+%% @spec update_c_fname(Old::cerl(), Name::atom()) -> cerl()
+%% @doc Like <code>update_c_fname/3</code>, but takes the arity from
+%% <code>Node</code>.
+%% @see update_c_fname/3
+%% @see c_fname/2
+
+update_c_fname(#var{name = {_, Arity}, ann = As}, Atom) ->
+ #var{name = {Atom, Arity}, ann = As}.
+
+
+%% @spec update_c_fname(Old::cerl(), Name::atom(), Arity::integer()) ->
+%% cerl()
+%% @equiv update_c_var(Old, {Atom, Arity})
+%% @see update_c_fname/2
+%% @see c_fname/2
+
+update_c_fname(Node, Atom, Arity) ->
+ update_c_var(Node, {Atom, Arity}).
+
+
+%% @spec is_c_fname(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% function name variable, otherwise <code>false</code>.
+%%
+%% @see c_fname/2
+%% @see c_var/1
+%% @see var_name/1
+
+is_c_fname(#var{name = {A, N}}) when atom(A), integer(N), N >= 0 ->
+ true;
+is_c_fname(_) ->
+ false.
+
+
+%% @spec var_name(cerl()) -> var_name()
+%%
+%% @doc Returns the name of an abstract variable.
+%%
+%% @see c_var/1
+
+var_name(Node) ->
+ Node#var.name.
+
+
+%% @spec fname_id(cerl()) -> atom()
+%%
+%% @doc Returns the identifier part of an abstract function name
+%% variable.
+%%
+%% @see fname_arity/1
+%% @see c_fname/2
+
+fname_id(#var{name={A,_}}) ->
+ A.
+
+
+%% @spec fname_arity(cerl()) -> integer()
+%%
+%% @doc Returns the arity part of an abstract function name variable.
+%%
+%% @see fname_id/1
+%% @see c_fname/2
+
+fname_arity(#var{name={_,N}}) ->
+ N.
+
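+%% Usage sketch for variables and function name variables (module name
+%% `cerl' assumed):
+%%
+%%   X = cerl:c_var('X'),
+%%   'X' = cerl:var_name(X),
+%%   F = cerl:c_fname(foo, 2),
+%%   true = cerl:is_c_fname(F) andalso cerl:is_c_var(F),
+%%   {foo, 2} = cerl:var_name(F),
+%%   foo = cerl:fname_id(F),
+%%   2 = cerl:fname_arity(F).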
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_values(Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract value list. If <code>Elements</code> is
+%% <code>[E1, ..., En]</code>, the result represents
+%% "<code>&lt;<em>E1</em>, ..., <em>En</em>&gt;</code>".
+%%
+%% @see ann_c_values/2
+%% @see update_c_values/2
+%% @see is_c_values/1
+%% @see values_es/1
+%% @see values_arity/1
+
+-record(values, {ann = [], es}).
+
+c_values(Es) ->
+ #values{es = Es}.
+
+
+%% @spec ann_c_values(As::[term()], Elements::[cerl()]) -> cerl()
+%% @see c_values/1
+
+ann_c_values(As, Es) ->
+ #values{es = Es, ann = As}.
+
+
+%% @spec update_c_values(Old::cerl(), Elements::[cerl()]) -> cerl()
+%% @see c_values/1
+
+update_c_values(Node, Es) ->
+ #values{es = Es, ann = get_ann(Node)}.
+
+
+%% @spec is_c_values(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% value list; otherwise <code>false</code>.
+%%
+%% @see c_values/1
+
+is_c_values(#values{}) ->
+ true;
+is_c_values(_) ->
+ false.
+
+
+%% @spec values_es(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of element subtrees of an abstract value
+%% list.
+%%
+%% @see c_values/1
+%% @see values_arity/1
+
+values_es(Node) ->
+ Node#values.es.
+
+
+%% @spec values_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of element subtrees of an abstract value
+%% list.
+%%
+%% <p>Note: This is equivalent to
+%% <code>length(values_es(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_values/1
+%% @see values_es/1
+
+values_arity(Node) ->
+ length(values_es(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_binary(Segments::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract binary-template. A binary object is a
+%% sequence of 8-bit bytes. It is specified by zero or more bit-string
+%% template <em>segments</em> of arbitrary lengths (in number of bits),
+%% such that the sum of the lengths is evenly divisible by 8. If
+%% <code>Segments</code> is <code>[S1, ..., Sn]</code>, the result
+%% represents "<code>#{<em>S1</em>, ..., <em>Sn</em>}#</code>". All the
+%% <code>Si</code> must have type <code>bitstr</code>.
+%%
+%% @see ann_c_binary/2
+%% @see update_c_binary/2
+%% @see is_c_binary/1
+%% @see binary_segments/1
+%% @see c_bitstr/5
+
+-record(binary, {ann = [], segments}).
+
+c_binary(Segments) ->
+ #binary{segments = Segments}.
+
+
+%% @spec ann_c_binary(As::[term()], Segments::[cerl()]) -> cerl()
+%% @see c_binary/1
+
+ann_c_binary(As, Segments) ->
+ #binary{segments = Segments, ann = As}.
+
+
+%% @spec update_c_binary(Old::cerl(), Segments::[cerl()]) -> cerl()
+%% @see c_binary/1
+
+update_c_binary(Node, Segments) ->
+ #binary{segments = Segments, ann = get_ann(Node)}.
+
+
+%% @spec is_c_binary(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% binary-template; otherwise <code>false</code>.
+%%
+%% @see c_binary/1
+
+is_c_binary(#binary{}) ->
+ true;
+is_c_binary(_) ->
+ false.
+
+
+%% @spec binary_segments(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of segment subtrees of an abstract
+%% binary-template.
+%%
+%% @see c_binary/1
+%% @see c_bitstr/5
+
+binary_segments(Node) ->
+ Node#binary.segments.
+
+
+%% @spec c_bitstr(Value::cerl(), Size::cerl(), Unit::cerl(),
+%% Type::cerl(), Flags::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract bit-string template. These can only occur as
+%% components of an abstract binary-template (see {@link c_binary/1}).
+%% The result represents "<code>#&lt;<em>Value</em>&gt;(<em>Size</em>,
+%% <em>Unit</em>, <em>Type</em>, <em>Flags</em>)</code>", where
+%% <code>Unit</code> must represent a positive integer constant,
+%% <code>Type</code> must represent a constant atom (one of
+%% <code>'integer'</code>, <code>'float'</code>, or
+%% <code>'binary'</code>), and <code>Flags</code> must represent a
+%% constant list <code>"[<em>F1</em>, ..., <em>Fn</em>]"</code> where
+%% all the <code>Fi</code> are atoms.
+%%
+%% @see c_binary/1
+%% @see ann_c_bitstr/6
+%% @see update_c_bitstr/6
+%% @see is_c_bitstr/1
+%% @see bitstr_val/1
+%% @see bitstr_size/1
+%% @see bitstr_unit/1
+%% @see bitstr_type/1
+%% @see bitstr_flags/1
+
+-record(bitstr, {ann = [], val, size, unit, type, flags}).
+
+c_bitstr(Val, Size, Unit, Type, Flags) ->
+ #bitstr{val = Val, size = Size, unit = Unit, type = Type,
+ flags = Flags}.
+
+
+%% @spec c_bitstr(Value::cerl(), Size::cerl(), Type::cerl(),
+%% Flags::cerl()) -> cerl()
+%% @equiv c_bitstr(Value, Size, abstract(1), Type, Flags)
+
+c_bitstr(Val, Size, Type, Flags) ->
+ c_bitstr(Val, Size, abstract(1), Type, Flags).
+
+
+%% @spec c_bitstr(Value::cerl(), Type::cerl(),
+%% Flags::cerl()) -> cerl()
+%% @equiv c_bitstr(Value, abstract(all), abstract(1), Type, Flags)
+
+c_bitstr(Val, Type, Flags) ->
+ c_bitstr(Val, abstract(all), abstract(1), Type, Flags).
+
+
+%% @spec ann_c_bitstr(As::[term()], Value::cerl(), Size::cerl(),
+%% Unit::cerl(), Type::cerl(), Flags::cerl()) -> cerl()
+%% @see c_bitstr/5
+%% @see ann_c_bitstr/5
+
+ann_c_bitstr(As, Val, Size, Unit, Type, Flags) ->
+ #bitstr{val = Val, size = Size, unit = Unit, type = Type,
+ flags = Flags, ann = As}.
+
+%% @spec ann_c_bitstr(As::[term()], Value::cerl(), Size::cerl(),
+%% Type::cerl(), Flags::cerl()) -> cerl()
+%% @equiv ann_c_bitstr(As, Value, Size, abstract(1), Type, Flags)
+
+ann_c_bitstr(As, Value, Size, Type, Flags) ->
+ ann_c_bitstr(As, Value, Size, abstract(1), Type, Flags).
+
+
+%% @spec update_c_bitstr(Old::cerl(), Value::cerl(), Size::cerl(),
+%% Unit::cerl(), Type::cerl(), Flags::cerl()) -> cerl()
+%% @see c_bitstr/5
+%% @see update_c_bitstr/5
+
+update_c_bitstr(Node, Val, Size, Unit, Type, Flags) ->
+ #bitstr{val = Val, size = Size, unit = Unit, type = Type,
+ flags = Flags, ann = get_ann(Node)}.
+
+
+%% @spec update_c_bitstr(Old::cerl(), Value::cerl(), Size::cerl(),
+%% Type::cerl(), Flags::cerl()) -> cerl()
+%% @equiv update_c_bitstr(Node, Value, Size, abstract(1), Type, Flags)
+
+update_c_bitstr(Node, Value, Size, Type, Flags) ->
+ update_c_bitstr(Node, Value, Size, abstract(1), Type, Flags).
+
+%% @spec is_c_bitstr(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% bit-string template; otherwise <code>false</code>.
+%%
+%% @see c_bitstr/5
+
+is_c_bitstr(#bitstr{}) ->
+ true;
+is_c_bitstr(_) ->
+ false.
+
+
+%% @spec bitstr_val(cerl()) -> cerl()
+%%
+%% @doc Returns the value subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+bitstr_val(Node) ->
+ Node#bitstr.val.
+
+
+%% @spec bitstr_size(cerl()) -> cerl()
+%%
+%% @doc Returns the size subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+bitstr_size(Node) ->
+ Node#bitstr.size.
+
+
+%% @spec bitstr_bitsize(cerl()) -> integer() | any | all
+%%
+%% @doc Returns the total size in bits of an abstract bit-string
+%% template. If the size field is an integer literal, the result is the
+%% product of the size and unit values; if the size field is the atom
+%% literal <code>all</code>, the atom <code>all</code> is returned; in
+%% all other cases, the atom <code>any</code> is returned.
+%%
+%% @see c_bitstr/5
+
+bitstr_bitsize(Node) ->
+ Size = Node#bitstr.size,
+ case is_literal(Size) of
+ true ->
+ case concrete(Size) of
+ all ->
+ all;
+ S when integer(S) ->
+ S*concrete(Node#bitstr.unit);
+ _ -> % size is neither an integer nor 'all'
+ any
+ end;
+ false ->
+ any
+ end.
+
+
+%% @spec bitstr_unit(cerl()) -> cerl()
+%%
+%% @doc Returns the unit subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+bitstr_unit(Node) ->
+ Node#bitstr.unit.
+
+
+%% @spec bitstr_type(cerl()) -> cerl()
+%%
+%% @doc Returns the type subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+bitstr_type(Node) ->
+ Node#bitstr.type.
+
+
+%% @spec bitstr_flags(cerl()) -> cerl()
+%%
+%% @doc Returns the flags subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+bitstr_flags(Node) ->
+ Node#bitstr.flags.
+
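+%% Usage sketch for binary-templates and bit-string templates (module
+%% name `cerl' assumed):
+%%
+%%   Seg = cerl:c_bitstr(cerl:c_int(255), cerl:c_int(8), cerl:c_int(1),
+%%                       cerl:c_atom(integer),
+%%                       cerl:abstract([unsigned, big])),
+%%   Bin = cerl:c_binary([Seg]),
+%%   true = cerl:is_c_binary(Bin),
+%%   [Seg] = cerl:binary_segments(Bin),
+%%   8 = cerl:bitstr_bitsize(Seg).       % size 8 times unit 1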
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_fun(Variables::[cerl()], Body::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract fun-expression. If <code>Variables</code>
+%% is <code>[V1, ..., Vn]</code>, the result represents "<code>fun
+%% (<em>V1</em>, ..., <em>Vn</em>) -> <em>Body</em></code>". All the
+%% <code>Vi</code> must have type <code>var</code>.
+%%
+%% @see ann_c_fun/3
+%% @see update_c_fun/3
+%% @see is_c_fun/1
+%% @see fun_vars/1
+%% @see fun_body/1
+%% @see fun_arity/1
+
+-record('fun', {ann = [], vars, body}).
+
+c_fun(Variables, Body) ->
+ #'fun'{vars = Variables, body = Body}.
+
+
+%% @spec ann_c_fun(As::[term()], Variables::[cerl()], Body::cerl()) ->
+%% cerl()
+%% @see c_fun/2
+
+ann_c_fun(As, Variables, Body) ->
+ #'fun'{vars = Variables, body = Body, ann = As}.
+
+
+%% @spec update_c_fun(Old::cerl(), Variables::[cerl()],
+%% Body::cerl()) -> cerl()
+%% @see c_fun/2
+
+update_c_fun(Node, Variables, Body) ->
+ #'fun'{vars = Variables, body = Body, ann = get_ann(Node)}.
+
+
+%% @spec is_c_fun(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% fun-expression, otherwise <code>false</code>.
+%%
+%% @see c_fun/2
+
+is_c_fun(#'fun'{}) ->
+ true; % Now this is fun!
+is_c_fun(_) ->
+ false.
+
+
+%% @spec fun_vars(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of parameter subtrees of an abstract
+%% fun-expression.
+%%
+%% @see c_fun/2
+%% @see fun_arity/1
+
+fun_vars(Node) ->
+ Node#'fun'.vars.
+
+
+%% @spec fun_body(cerl()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract fun-expression.
+%%
+%% @see c_fun/2
+
+fun_body(Node) ->
+ Node#'fun'.body.
+
+
+%% @spec fun_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of parameter subtrees of an abstract
+%% fun-expression.
+%%
+%% <p>Note: this is equivalent to <code>length(fun_vars(Node))</code>,
+%% but potentially more efficient.</p>
+%%
+%% @see c_fun/2
+%% @see fun_vars/1
+
+fun_arity(Node) ->
+ length(fun_vars(Node)).
+
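+%% Usage sketch for fun-expressions (module name `cerl' assumed):
+%%
+%%   X = cerl:c_var('X'),
+%%   F = cerl:c_fun([X], X),             % fun (X) -> X
+%%   true = cerl:is_c_fun(F),
+%%   1 = cerl:fun_arity(F),
+%%   [X] = cerl:fun_vars(F).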
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_seq(Argument::cerl(), Body::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract sequencing expression. The result
+%% represents "<code>do <em>Argument</em> <em>Body</em></code>".
+%%
+%% @see ann_c_seq/3
+%% @see update_c_seq/3
+%% @see is_c_seq/1
+%% @see seq_arg/1
+%% @see seq_body/1
+
+-record(seq, {ann = [], arg, body}).
+
+c_seq(Argument, Body) ->
+ #seq{arg = Argument, body = Body}.
+
+
+%% @spec ann_c_seq(As::[term()], Argument::cerl(), Body::cerl()) ->
+%% cerl()
+%% @see c_seq/2
+
+ann_c_seq(As, Argument, Body) ->
+ #seq{arg = Argument, body = Body, ann = As}.
+
+
+%% @spec update_c_seq(Old::cerl(), Argument::cerl(), Body::cerl()) ->
+%% cerl()
+%% @see c_seq/2
+
+update_c_seq(Node, Argument, Body) ->
+ #seq{arg = Argument, body = Body, ann = get_ann(Node)}.
+
+
+%% @spec is_c_seq(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% sequencing expression, otherwise <code>false</code>.
+%%
+%% @see c_seq/2
+
+is_c_seq(#seq{}) ->
+ true;
+is_c_seq(_) ->
+ false.
+
+
+%% @spec seq_arg(cerl()) -> cerl()
+%%
+%% @doc Returns the argument subtree of an abstract sequencing
+%% expression.
+%%
+%% @see c_seq/2
+
+seq_arg(Node) ->
+ Node#seq.arg.
+
+
+%% @spec seq_body(cerl()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract sequencing expression.
+%%
+%% @see c_seq/2
+
+seq_body(Node) ->
+ Node#seq.body.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_let(Variables::[cerl()], Argument::cerl(), Body::cerl()) ->
+%% cerl()
+%%
+%% @doc Creates an abstract let-expression. If <code>Variables</code>
+%% is <code>[V1, ..., Vn]</code>, the result represents "<code>let
+%% &lt;<em>V1</em>, ..., <em>Vn</em>&gt; = <em>Argument</em> in
+%% <em>Body</em></code>". All the <code>Vi</code> must have type
+%% <code>var</code>.
+%%
+%% @see ann_c_let/4
+%% @see update_c_let/4
+%% @see is_c_let/1
+%% @see let_vars/1
+%% @see let_arg/1
+%% @see let_body/1
+%% @see let_arity/1
+
+-record('let', {ann = [], vars, arg, body}).
+
+c_let(Variables, Argument, Body) ->
+ #'let'{vars = Variables, arg = Argument, body = Body}.
+
+
+%% @spec ann_c_let(As::[term()], Variables::[cerl()],
+%%       Argument::cerl(), Body::cerl()) -> cerl()
+%% @see c_let/3
+
+ann_c_let(As, Variables, Argument, Body) ->
+ #'let'{vars = Variables, arg = Argument, body = Body, ann = As}.
+
+
+%% @spec update_c_let(Old::cerl(), Variables::[cerl()],
+%%       Argument::cerl(), Body::cerl()) -> cerl()
+%% @see c_let/3
+
+update_c_let(Node, Variables, Argument, Body) ->
+ #'let'{vars = Variables, arg = Argument, body = Body,
+ ann = get_ann(Node)}.
+
+
+%% @spec is_c_let(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% let-expression, otherwise <code>false</code>.
+%%
+%% @see c_let/3
+
+is_c_let(#'let'{}) ->
+ true;
+is_c_let(_) ->
+ false.
+
+
+%% @spec let_vars(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of left-hand side variables of an abstract
+%% let-expression.
+%%
+%% @see c_let/3
+%% @see let_arity/1
+
+let_vars(Node) ->
+ Node#'let'.vars.
+
+
+%% @spec let_arg(cerl()) -> cerl()
+%%
+%% @doc Returns the argument subtree of an abstract let-expression.
+%%
+%% @see c_let/3
+
+let_arg(Node) ->
+ Node#'let'.arg.
+
+
+%% @spec let_body(cerl()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract let-expression.
+%%
+%% @see c_let/3
+
+let_body(Node) ->
+ Node#'let'.body.
+
+
+%% @spec let_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of left-hand side variables of an abstract
+%% let-expression.
+%%
+%% <p>Note: this is equivalent to <code>length(let_vars(Node))</code>,
+%% but potentially more efficient.</p>
+%%
+%% @see c_let/3
+%% @see let_vars/1
+
+let_arity(Node) ->
+ length(let_vars(Node)).
+
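+%% Usage sketch for let-expressions (module name `cerl' assumed):
+%%
+%%   X = cerl:c_var('X'),
+%%   L = cerl:c_let([X], cerl:c_int(1), X),   % let <X> = 1 in X
+%%   true = cerl:is_c_let(L),
+%%   1 = cerl:let_arity(L),
+%%   1 = cerl:concrete(cerl:let_arg(L)).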
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_letrec(Definitions::[{cerl(), cerl()}], Body::cerl()) ->
+%% cerl()
+%%
+%% @doc Creates an abstract letrec-expression. If
+%% <code>Definitions</code> is <code>[{V1, F1}, ..., {Vn, Fn}]</code>,
+%% the result represents "<code>letrec <em>V1</em> = <em>F1</em>
+%% ... <em>Vn</em> = <em>Fn</em> in <em>Body</em></code>". All the
+%% <code>Vi</code> must have type <code>var</code> and represent
+%% function names. All the <code>Fi</code> must have type
+%% <code>'fun'</code>.
+%%
+%% @see ann_c_letrec/3
+%% @see update_c_letrec/3
+%% @see is_c_letrec/1
+%% @see letrec_defs/1
+%% @see letrec_body/1
+%% @see letrec_vars/1
+
+-record(letrec, {ann = [], defs, body}).
+
+c_letrec(Defs, Body) ->
+ #letrec{defs = Defs, body = Body}.
+
+
+%% @spec ann_c_letrec(As::[term()], Definitions::[{cerl(), cerl()}],
+%% Body::cerl()) -> cerl()
+%% @see c_letrec/2
+
+ann_c_letrec(As, Defs, Body) ->
+ #letrec{defs = Defs, body = Body, ann = As}.
+
+
+%% @spec update_c_letrec(Old::cerl(),
+%% Definitions::[{cerl(), cerl()}],
+%% Body::cerl()) -> cerl()
+%% @see c_letrec/2
+
+update_c_letrec(Node, Defs, Body) ->
+ #letrec{defs = Defs, body = Body, ann = get_ann(Node)}.
+
+
+%% @spec is_c_letrec(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% letrec-expression, otherwise <code>false</code>.
+%%
+%% @see c_letrec/2
+
+is_c_letrec(#letrec{}) ->
+ true;
+is_c_letrec(_) ->
+ false.
+
+
+%% @spec letrec_defs(Node::cerl()) -> [{cerl(), cerl()}]
+%%
+%% @doc Returns the list of definitions of an abstract
+%% letrec-expression. If <code>Node</code> represents "<code>letrec
+%% <em>V1</em> = <em>F1</em> ... <em>Vn</em> = <em>Fn</em> in
+%% <em>Body</em></code>", the returned value is <code>[{V1, F1}, ...,
+%% {Vn, Fn}]</code>.
+%%
+%% @see c_letrec/2
+
+letrec_defs(Node) ->
+ Node#letrec.defs.
+
+
+%% @spec letrec_body(cerl()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract letrec-expression.
+%%
+%% @see c_letrec/2
+
+letrec_body(Node) ->
+ Node#letrec.body.
+
+
+%% @spec letrec_vars(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of left-hand side function variable subtrees
+%% of a letrec-expression. If <code>Node</code> represents
+%% "<code>letrec <em>V1</em> = <em>F1</em> ... <em>Vn</em> =
+%% <em>Fn</em> in <em>Body</em></code>", the returned value is
+%% <code>[V1, ..., Vn]</code>.
+%%
+%% @see c_letrec/2
+
+letrec_vars(Node) ->
+ [F || {F, _} <- letrec_defs(Node)].
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_case(Argument::cerl(), Clauses::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract case-expression. If <code>Clauses</code>
+%% is <code>[C1, ..., Cn]</code>, the result represents "<code>case
+%% <em>Argument</em> of <em>C1</em> ... <em>Cn</em>
+%% end</code>". <code>Clauses</code> must not be empty.
+%%
+%% @see ann_c_case/3
+%% @see update_c_case/3
+%% @see is_c_case/1
+%% @see c_clause/3
+%% @see case_arg/1
+%% @see case_clauses/1
+%% @see case_arity/1
+
+-record('case', {ann = [], arg, clauses}).
+
+c_case(Expr, Clauses) ->
+ #'case'{arg = Expr, clauses = Clauses}.
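+
+%% For illustration, a minimal sketch of a two-clause case switching
+%% on a variable, assuming the constructors c_var/1, c_atom/1 and
+%% c_int/1 defined elsewhere in this module (c_clause/2 is defined
+%% further below):
+%%
+%%     Case = c_case(c_var('X'),
+%%                   [c_clause([c_atom(a)], c_int(1)),
+%%                    c_clause([c_var('_Other')], c_int(0))])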
+
+
+%% @spec ann_c_case(As::[term()], Argument::cerl(),
+%% Clauses::[cerl()]) -> cerl()
+%% @see c_case/2
+
+ann_c_case(As, Expr, Clauses) ->
+ #'case'{arg = Expr, clauses = Clauses, ann = As}.
+
+
+%% @spec update_c_case(Old::cerl(), Argument::cerl(),
+%% Clauses::[cerl()]) -> cerl()
+%% @see c_case/2
+
+update_c_case(Node, Expr, Clauses) ->
+ #'case'{arg = Expr, clauses = Clauses, ann = get_ann(Node)}.
+
+
+%% @spec is_c_case(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% case-expression; otherwise <code>false</code>.
+%%
+%% @see c_case/2
+
+is_c_case(#'case'{}) ->
+ true;
+is_c_case(_) ->
+ false.
+
+
+%% @spec case_arg(cerl()) -> cerl()
+%%
+%% @doc Returns the argument subtree of an abstract case-expression.
+%%
+%% @see c_case/2
+
+case_arg(Node) ->
+ Node#'case'.arg.
+
+
+%% @spec case_clauses(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of clause subtrees of an abstract
+%% case-expression.
+%%
+%% @see c_case/2
+%% @see case_arity/1
+
+case_clauses(Node) ->
+ Node#'case'.clauses.
+
+
+%% @spec case_arity(Node::cerl()) -> integer()
+%%
+%% @doc Equivalent to
+%% <code>clause_arity(hd(case_clauses(Node)))</code>, but potentially
+%% more efficient.
+%%
+%% @see c_case/2
+%% @see case_clauses/1
+%% @see clause_arity/1
+
+case_arity(Node) ->
+ clause_arity(hd(case_clauses(Node))).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_clause(Patterns::[cerl()], Body::cerl()) -> cerl()
+%% @equiv c_clause(Patterns, c_atom(true), Body)
+%% @see c_atom/1
+
+c_clause(Patterns, Body) ->
+ c_clause(Patterns, c_atom(true), Body).
+
+
+%% @spec c_clause(Patterns::[cerl()], Guard::cerl(), Body::cerl()) ->
+%% cerl()
+%%
+%% @doc Creates an abstract clause. If <code>Patterns</code> is
+%% <code>[P1, ..., Pn]</code>, the result represents
+%% "<code>&lt;<em>P1</em>, ..., <em>Pn</em>&gt; when <em>Guard</em> ->
+%% <em>Body</em></code>".
+%%
+%% @see c_clause/2
+%% @see ann_c_clause/4
+%% @see update_c_clause/4
+%% @see is_c_clause/1
+%% @see c_case/2
+%% @see c_receive/3
+%% @see clause_pats/1
+%% @see clause_guard/1
+%% @see clause_body/1
+%% @see clause_arity/1
+%% @see clause_vars/1
+
+-record(clause, {ann = [], pats, guard, body}).
+
+c_clause(Patterns, Guard, Body) ->
+ #clause{pats = Patterns, guard = Guard, body = Body}.
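+
+%% For illustration, a minimal sketch of the clause "<{'ok', X}> when
+%% 'true' -> X", assuming the constructors c_tuple/1, c_atom/1 and
+%% c_var/1 defined elsewhere in this module:
+%%
+%%     Clause = c_clause([c_tuple([c_atom(ok), c_var('X')])],
+%%                       c_atom(true), c_var('X'))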
+
+
+%% @spec ann_c_clause(As::[term()], Patterns::[cerl()],
+%% Body::cerl()) -> cerl()
+%% @equiv ann_c_clause(As, Patterns, c_atom(true), Body)
+%% @see c_clause/3
+ann_c_clause(As, Patterns, Body) ->
+ ann_c_clause(As, Patterns, c_atom(true), Body).
+
+
+%% @spec ann_c_clause(As::[term()], Patterns::[cerl()], Guard::cerl(),
+%% Body::cerl()) -> cerl()
+%% @see ann_c_clause/3
+%% @see c_clause/3
+
+ann_c_clause(As, Patterns, Guard, Body) ->
+ #clause{pats = Patterns, guard = Guard, body = Body, ann = As}.
+
+
+%% @spec update_c_clause(Old::cerl(), Patterns::[cerl()],
+%% Guard::cerl(), Body::cerl()) -> cerl()
+%% @see c_clause/3
+
+update_c_clause(Node, Patterns, Guard, Body) ->
+ #clause{pats = Patterns, guard = Guard, body = Body,
+ ann = get_ann(Node)}.
+
+
+%% @spec is_c_clause(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% clause, otherwise <code>false</code>.
+%%
+%% @see c_clause/3
+
+is_c_clause(#clause{}) ->
+ true;
+is_c_clause(_) ->
+ false.
+
+
+%% @spec clause_pats(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of pattern subtrees of an abstract clause.
+%%
+%% @see c_clause/3
+%% @see clause_arity/1
+
+clause_pats(Node) ->
+ Node#clause.pats.
+
+
+%% @spec clause_guard(cerl()) -> cerl()
+%%
+%% @doc Returns the guard subtree of an abstract clause.
+%%
+%% @see c_clause/3
+
+clause_guard(Node) ->
+ Node#clause.guard.
+
+
+%% @spec clause_body(cerl()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract clause.
+%%
+%% @see c_clause/3
+
+clause_body(Node) ->
+ Node#clause.body.
+
+
+%% @spec clause_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of pattern subtrees of an abstract clause.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(clause_pats(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_clause/3
+%% @see clause_pats/1
+
+clause_arity(Node) ->
+ length(clause_pats(Node)).
+
+
+%% @spec clause_vars(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of all abstract variables in the patterns of
+%% an abstract clause. The order of listing is not defined.
+%%
+%% @see c_clause/3
+%% @see pat_list_vars/1
+
+clause_vars(Clause) ->
+ pat_list_vars(clause_pats(Clause)).
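+
+%% For illustration, given a clause such as
+%%
+%%     C = c_clause([c_tuple([c_atom(ok), c_var('X')])], c_var('X'))
+%%
+%% (c_tuple/1, c_atom/1 and c_var/1 are assumed to be defined
+%% elsewhere in this module), clause_arity(C) is 1 and clause_vars(C)
+%% is [c_var('X')]; the order of the returned variables is not defined
+%% when a pattern contains several.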
+
+
+%% @spec pat_vars(Pattern::cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of all abstract variables in a pattern. An
+%% exception is thrown if <code>Node</code> does not represent a
+%% well-formed Core Erlang clause pattern. The order of listing is not
+%% defined.
+%%
+%% @see pat_list_vars/1
+%% @see clause_vars/1
+
+pat_vars(Node) ->
+ pat_vars(Node, []).
+
+pat_vars(Node, Vs) ->
+ case type(Node) of
+ var ->
+ [Node | Vs];
+ literal ->
+ Vs;
+ cons ->
+ pat_vars(cons_hd(Node), pat_vars(cons_tl(Node), Vs));
+ tuple ->
+ pat_list_vars(tuple_es(Node), Vs);
+ binary ->
+ pat_list_vars(binary_segments(Node), Vs);
+ bitstr ->
+ pat_vars(bitstr_val(Node), Vs);
+ alias ->
+ pat_vars(alias_pat(Node), [alias_var(Node) | Vs])
+ end.
+
+
+%% @spec pat_list_vars(Patterns::[cerl()]) -> [cerl()]
+%%
+%% @doc Returns the list of all abstract variables in the given
+%% patterns. An exception is thrown if some element in
+%% <code>Patterns</code> does not represent a well-formed Core Erlang
+%% clause pattern. The order of listing is not defined.
+%%
+%% @see pat_vars/1
+%% @see clause_vars/1
+
+pat_list_vars(Ps) ->
+ pat_list_vars(Ps, []).
+
+pat_list_vars([P | Ps], Vs) ->
+ pat_list_vars(Ps, pat_vars(P, Vs));
+pat_list_vars([], Vs) ->
+ Vs.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_alias(Variable::cerl(), Pattern::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract pattern alias. The result represents
+%% "<code><em>Variable</em> = <em>Pattern</em></code>".
+%%
+%% @see ann_c_alias/3
+%% @see update_c_alias/3
+%% @see is_c_alias/1
+%% @see alias_var/1
+%% @see alias_pat/1
+%% @see c_clause/3
+
+-record(alias, {ann = [], var, pat}).
+
+c_alias(Var, Pattern) ->
+ #alias{var = Var, pat = Pattern}.
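+
+%% For illustration, a minimal sketch of the pattern "Z = {'tag', X}"
+%% as it would occur inside a clause, assuming c_tuple/1, c_atom/1 and
+%% c_var/1 defined elsewhere in this module:
+%%
+%%     Alias = c_alias(c_var('Z'), c_tuple([c_atom(tag), c_var('X')]))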
+
+
+%% @spec ann_c_alias(As::[term()], Variable::cerl(),
+%% Pattern::cerl()) -> cerl()
+%% @see c_alias/2
+
+ann_c_alias(As, Var, Pattern) ->
+ #alias{var = Var, pat = Pattern, ann = As}.
+
+
+%% @spec update_c_alias(Old::cerl(), Variable::cerl(),
+%% Pattern::cerl()) -> cerl()
+%% @see c_alias/2
+
+update_c_alias(Node, Var, Pattern) ->
+ #alias{var = Var, pat = Pattern, ann = get_ann(Node)}.
+
+
+%% @spec is_c_alias(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% pattern alias, otherwise <code>false</code>.
+%%
+%% @see c_alias/2
+
+is_c_alias(#alias{}) ->
+ true;
+is_c_alias(_) ->
+ false.
+
+
+%% @spec alias_var(cerl()) -> cerl()
+%%
+%% @doc Returns the variable subtree of an abstract pattern alias.
+%%
+%% @see c_alias/2
+
+alias_var(Node) ->
+ Node#alias.var.
+
+
+%% @spec alias_pat(cerl()) -> cerl()
+%%
+%% @doc Returns the pattern subtree of an abstract pattern alias.
+%%
+%% @see c_alias/2
+
+alias_pat(Node) ->
+ Node#alias.pat.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_receive(Clauses::[cerl()]) -> cerl()
+%% @equiv c_receive(Clauses, c_atom(infinity), c_atom(true))
+%% @see c_atom/1
+
+c_receive(Clauses) ->
+ c_receive(Clauses, c_atom(infinity), c_atom(true)).
+
+
+%% @spec c_receive(Clauses::[cerl()], Timeout::cerl(),
+%% Action::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract receive-expression. If
+%% <code>Clauses</code> is <code>[C1, ..., Cn]</code>, the result
+%% represents "<code>receive <em>C1</em> ... <em>Cn</em> after
+%% <em>Timeout</em> -> <em>Action</em> end</code>".
+%%
+%% @see c_receive/1
+%% @see ann_c_receive/4
+%% @see update_c_receive/4
+%% @see is_c_receive/1
+%% @see receive_clauses/1
+%% @see receive_timeout/1
+%% @see receive_action/1
+
+-record('receive', {ann = [], clauses, timeout, action}).
+
+c_receive(Clauses, Timeout, Action) ->
+ #'receive'{clauses = Clauses, timeout = Timeout, action = Action}.
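+
+%% For illustration, a minimal sketch of "receive <{'msg', X}> when
+%% 'true' -> X after 1000 -> 'timeout' end", assuming c_tuple/1,
+%% c_atom/1, c_var/1 and c_int/1 defined elsewhere in this module:
+%%
+%%     Recv = c_receive([c_clause([c_tuple([c_atom(msg), c_var('X')])],
+%%                                c_var('X'))],
+%%                      c_int(1000), c_atom(timeout))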
+
+
+%% @spec ann_c_receive(As::[term()], Clauses::[cerl()]) -> cerl()
+%% @equiv ann_c_receive(As, Clauses, c_atom(infinity), c_atom(true))
+%% @see c_receive/3
+%% @see c_atom/1
+
+ann_c_receive(As, Clauses) ->
+ ann_c_receive(As, Clauses, c_atom(infinity), c_atom(true)).
+
+
+%% @spec ann_c_receive(As::[term()], Clauses::[cerl()],
+%% Timeout::cerl(), Action::cerl()) -> cerl()
+%% @see ann_c_receive/2
+%% @see c_receive/3
+
+ann_c_receive(As, Clauses, Timeout, Action) ->
+ #'receive'{clauses = Clauses, timeout = Timeout, action = Action,
+ ann = As}.
+
+
+%% @spec update_c_receive(Old::cerl(), Clauses::[cerl()],
+%% Timeout::cerl(), Action::cerl()) -> cerl()
+%% @see c_receive/3
+
+update_c_receive(Node, Clauses, Timeout, Action) ->
+ #'receive'{clauses = Clauses, timeout = Timeout, action = Action,
+ ann = get_ann(Node)}.
+
+
+%% @spec is_c_receive(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% receive-expression, otherwise <code>false</code>.
+%%
+%% @see c_receive/3
+
+is_c_receive(#'receive'{}) ->
+ true;
+is_c_receive(_) ->
+ false.
+
+
+%% @spec receive_clauses(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of clause subtrees of an abstract
+%% receive-expression.
+%%
+%% @see c_receive/3
+
+receive_clauses(Node) ->
+ Node#'receive'.clauses.
+
+
+%% @spec receive_timeout(cerl()) -> cerl()
+%%
+%% @doc Returns the timeout subtree of an abstract receive-expression.
+%%
+%% @see c_receive/3
+
+receive_timeout(Node) ->
+ Node#'receive'.timeout.
+
+
+%% @spec receive_action(cerl()) -> cerl()
+%%
+%% @doc Returns the action subtree of an abstract receive-expression.
+%%
+%% @see c_receive/3
+
+receive_action(Node) ->
+ Node#'receive'.action.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_apply(Operator::cerl(), Arguments::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract function application. If
+%% <code>Arguments</code> is <code>[A1, ..., An]</code>, the result
+%% represents "<code>apply <em>Operator</em>(<em>A1</em>, ...,
+%% <em>An</em>)</code>".
+%%
+%% @see ann_c_apply/3
+%% @see update_c_apply/3
+%% @see is_c_apply/1
+%% @see apply_op/1
+%% @see apply_args/1
+%% @see apply_arity/1
+%% @see c_call/3
+%% @see c_primop/2
+
+-record(apply, {ann = [], op, args}).
+
+c_apply(Operator, Arguments) ->
+ #apply{op = Operator, args = Arguments}.
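+
+%% For illustration, a minimal sketch of "apply 'f'/1(X)", assuming
+%% c_fname/2 and c_var/1 defined elsewhere in this module:
+%%
+%%     Apply = c_apply(c_fname(f, 1), [c_var('X')])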
+
+
+%% @spec ann_c_apply(As::[term()], Operator::cerl(),
+%% Arguments::[cerl()]) -> cerl()
+%% @see c_apply/2
+
+ann_c_apply(As, Operator, Arguments) ->
+ #apply{op = Operator, args = Arguments, ann = As}.
+
+
+%% @spec update_c_apply(Old::cerl(), Operator::cerl(),
+%% Arguments::[cerl()]) -> cerl()
+%% @see c_apply/2
+
+update_c_apply(Node, Operator, Arguments) ->
+ #apply{op = Operator, args = Arguments, ann = get_ann(Node)}.
+
+
+%% @spec is_c_apply(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% function application, otherwise <code>false</code>.
+%%
+%% @see c_apply/2
+
+is_c_apply(#apply{}) ->
+ true;
+is_c_apply(_) ->
+ false.
+
+
+%% @spec apply_op(cerl()) -> cerl()
+%%
+%% @doc Returns the operator subtree of an abstract function
+%% application.
+%%
+%% @see c_apply/2
+
+apply_op(Node) ->
+ Node#apply.op.
+
+
+%% @spec apply_args(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of argument subtrees of an abstract function
+%% application.
+%%
+%% @see c_apply/2
+%% @see apply_arity/1
+
+apply_args(Node) ->
+ Node#apply.args.
+
+
+%% @spec apply_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of argument subtrees of an abstract
+%% function application.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(apply_args(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_apply/2
+%% @see apply_args/1
+
+apply_arity(Node) ->
+ length(apply_args(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_call(Module::cerl(), Name::cerl(), Arguments::[cerl()]) ->
+%% cerl()
+%%
+%% @doc Creates an abstract inter-module call. If
+%% <code>Arguments</code> is <code>[A1, ..., An]</code>, the result
+%% represents "<code>call <em>Module</em>:<em>Name</em>(<em>A1</em>,
+%% ..., <em>An</em>)</code>".
+%%
+%% @see ann_c_call/4
+%% @see update_c_call/4
+%% @see is_c_call/1
+%% @see call_module/1
+%% @see call_name/1
+%% @see call_args/1
+%% @see call_arity/1
+%% @see c_apply/2
+%% @see c_primop/2
+
+-record(call, {ann = [], module, name, args}).
+
+c_call(Module, Name, Arguments) ->
+ #call{module = Module, name = Name, args = Arguments}.
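+
+%% For illustration, a minimal sketch of "call 'erlang':'+'(X, 1)",
+%% assuming c_atom/1, c_var/1 and c_int/1 defined elsewhere in this
+%% module:
+%%
+%%     Call = c_call(c_atom(erlang), c_atom('+'),
+%%                   [c_var('X'), c_int(1)])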
+
+
+%% @spec ann_c_call(As::[term()], Module::cerl(), Name::cerl(),
+%% Arguments::[cerl()]) -> cerl()
+%% @see c_call/3
+
+ann_c_call(As, Module, Name, Arguments) ->
+ #call{module = Module, name = Name, args = Arguments, ann = As}.
+
+
+%% @spec update_c_call(Old::cerl(), Module::cerl(), Name::cerl(),
+%% Arguments::[cerl()]) -> cerl()
+%% @see c_call/3
+
+update_c_call(Node, Module, Name, Arguments) ->
+ #call{module = Module, name = Name, args = Arguments,
+ ann = get_ann(Node)}.
+
+
+%% @spec is_c_call(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% inter-module call expression; otherwise <code>false</code>.
+%%
+%% @see c_call/3
+
+is_c_call(#call{}) ->
+ true;
+is_c_call(_) ->
+ false.
+
+
+%% @spec call_module(cerl()) -> cerl()
+%%
+%% @doc Returns the module subtree of an abstract inter-module call.
+%%
+%% @see c_call/3
+
+call_module(Node) ->
+ Node#call.module.
+
+
+%% @spec call_name(cerl()) -> cerl()
+%%
+%% @doc Returns the name subtree of an abstract inter-module call.
+%%
+%% @see c_call/3
+
+call_name(Node) ->
+ Node#call.name.
+
+
+%% @spec call_args(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of argument subtrees of an abstract
+%% inter-module call.
+%%
+%% @see c_call/3
+%% @see call_arity/1
+
+call_args(Node) ->
+ Node#call.args.
+
+
+%% @spec call_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of argument subtrees of an abstract
+%% inter-module call.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(call_args(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_call/3
+%% @see call_args/1
+
+call_arity(Node) ->
+ length(call_args(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_primop(Name::cerl(), Arguments::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract primitive operation call. If
+%% <code>Arguments</code> is <code>[A1, ..., An]</code>, the result
+%% represents "<code>primop <em>Name</em>(<em>A1</em>, ...,
+%% <em>An</em>)</code>". <code>Name</code> must be an atom literal.
+%%
+%% @see ann_c_primop/3
+%% @see update_c_primop/3
+%% @see is_c_primop/1
+%% @see primop_name/1
+%% @see primop_args/1
+%% @see primop_arity/1
+%% @see c_apply/2
+%% @see c_call/3
+
+-record(primop, {ann = [], name, args}).
+
+c_primop(Name, Arguments) ->
+ #primop{name = Name, args = Arguments}.
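+
+%% For illustration, a minimal sketch of a primitive operation call
+%% (the primop name 'match_fail' is used here only as an example),
+%% assuming c_atom/1, c_tuple/1 and c_var/1 defined elsewhere in this
+%% module:
+%%
+%%     Primop = c_primop(c_atom(match_fail),
+%%                       [c_tuple([c_atom(badmatch), c_var('X')])])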
+
+
+%% @spec ann_c_primop(As::[term()], Name::cerl(),
+%% Arguments::[cerl()]) -> cerl()
+%% @see c_primop/2
+
+ann_c_primop(As, Name, Arguments) ->
+ #primop{name = Name, args = Arguments, ann = As}.
+
+
+%% @spec update_c_primop(Old::cerl(), Name::cerl(),
+%% Arguments::[cerl()]) -> cerl()
+%% @see c_primop/2
+
+update_c_primop(Node, Name, Arguments) ->
+ #primop{name = Name, args = Arguments, ann = get_ann(Node)}.
+
+
+%% @spec is_c_primop(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% primitive operation call, otherwise <code>false</code>.
+%%
+%% @see c_primop/2
+
+is_c_primop(#primop{}) ->
+ true;
+is_c_primop(_) ->
+ false.
+
+
+%% @spec primop_name(cerl()) -> cerl()
+%%
+%% @doc Returns the name subtree of an abstract primitive operation
+%% call.
+%%
+%% @see c_primop/2
+
+primop_name(Node) ->
+ Node#primop.name.
+
+
+%% @spec primop_args(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of argument subtrees of an abstract primitive
+%% operation call.
+%%
+%% @see c_primop/2
+%% @see primop_arity/1
+
+primop_args(Node) ->
+ Node#primop.args.
+
+
+%% @spec primop_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of argument subtrees of an abstract
+%% primitive operation call.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(primop_args(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_primop/2
+%% @see primop_args/1
+
+primop_arity(Node) ->
+ length(primop_args(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_try(Argument::cerl(), Variables::[cerl()], Body::cerl(),
+%% ExceptionVars::[cerl()], Handler::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract try-expression. If <code>Variables</code> is
+%% <code>[V1, ..., Vn]</code> and <code>ExceptionVars</code> is
+%% <code>[X1, ..., Xm]</code>, the result represents "<code>try
+%% <em>Argument</em> of &lt;<em>V1</em>, ..., <em>Vn</em>&gt; ->
+%% <em>Body</em> catch &lt;<em>X1</em>, ..., <em>Xm</em>&gt; ->
+%% <em>Handler</em></code>". All the <code>Vi</code> and <code>Xi</code>
+%% must have type <code>var</code>.
+%%
+%% @see ann_c_try/6
+%% @see update_c_try/6
+%% @see is_c_try/1
+%% @see try_arg/1
+%% @see try_vars/1
+%% @see try_body/1
+%% @see c_catch/1
+
+-record('try', {ann = [], arg, vars, body, evars, handler}).
+
+c_try(Expr, Vs, Body, Evs, Handler) ->
+ #'try'{arg = Expr, vars = Vs, body = Body,
+ evars = Evs, handler = Handler}.
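+
+%% For illustration, a minimal sketch of "try 'ok' of <V> -> V catch
+%% <T, R> -> 'error'", assuming c_var/1 and c_atom/1 defined elsewhere
+%% in this module:
+%%
+%%     Try = c_try(c_atom(ok), [c_var('V')], c_var('V'),
+%%                 [c_var('T'), c_var('R')], c_atom(error))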
+
+
+%% @spec ann_c_try(As::[term()], Expression::cerl(),
+%% Variables::[cerl()], Body::cerl(),
+%%                 EVars::[cerl()], Handler::cerl()) -> cerl()
+%% @see c_try/5
+
+ann_c_try(As, Expr, Vs, Body, Evs, Handler) ->
+ #'try'{arg = Expr, vars = Vs, body = Body,
+ evars = Evs, handler = Handler, ann = As}.
+
+
+%% @spec update_c_try(Old::cerl(), Expression::cerl(),
+%% Variables::[cerl()], Body::cerl(),
+%%                    EVars::[cerl()], Handler::cerl()) -> cerl()
+%% @see c_try/5
+
+update_c_try(Node, Expr, Vs, Body, Evs, Handler) ->
+ #'try'{arg = Expr, vars = Vs, body = Body,
+ evars = Evs, handler = Handler, ann = get_ann(Node)}.
+
+
+%% @spec is_c_try(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% try-expression, otherwise <code>false</code>.
+%%
+%% @see c_try/5
+
+is_c_try(#'try'{}) ->
+ true;
+is_c_try(_) ->
+ false.
+
+
+%% @spec try_arg(cerl()) -> cerl()
+%%
+%% @doc Returns the expression subtree of an abstract try-expression.
+%%
+%% @see c_try/5
+
+try_arg(Node) ->
+ Node#'try'.arg.
+
+
+%% @spec try_vars(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of success variable subtrees of an abstract
+%% try-expression.
+%%
+%% @see c_try/5
+
+try_vars(Node) ->
+ Node#'try'.vars.
+
+
+%% @spec try_body(cerl()) -> cerl()
+%%
+%% @doc Returns the success body subtree of an abstract try-expression.
+%%
+%% @see c_try/5
+
+try_body(Node) ->
+ Node#'try'.body.
+
+
+%% @spec try_evars(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of exception variable subtrees of an abstract
+%% try-expression.
+%%
+%% @see c_try/5
+
+try_evars(Node) ->
+ Node#'try'.evars.
+
+
+%% @spec try_handler(cerl()) -> cerl()
+%%
+%% @doc Returns the exception body subtree of an abstract
+%% try-expression.
+%%
+%% @see c_try/5
+
+try_handler(Node) ->
+ Node#'try'.handler.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_catch(Body::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract catch-expression. The result represents
+%% "<code>catch <em>Body</em></code>".
+%%
+%% <p>Note: catch-expressions can be rewritten as try-expressions, and
+%% will eventually be removed from Core Erlang.</p>
+%%
+%% @see ann_c_catch/2
+%% @see update_c_catch/2
+%% @see is_c_catch/1
+%% @see catch_body/1
+%% @see c_try/5
+
+-record('catch', {ann = [], body}).
+
+c_catch(Body) ->
+ #'catch'{body = Body}.
+
+
+%% @spec ann_c_catch(As::[term()], Body::cerl()) -> cerl()
+%% @see c_catch/1
+
+ann_c_catch(As, Body) ->
+ #'catch'{body = Body, ann = As}.
+
+
+%% @spec update_c_catch(Old::cerl(), Body::cerl()) -> cerl()
+%% @see c_catch/1
+
+update_c_catch(Node, Body) ->
+ #'catch'{body = Body, ann = get_ann(Node)}.
+
+
+%% @spec is_c_catch(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% catch-expression, otherwise <code>false</code>.
+%%
+%% @see c_catch/1
+
+is_c_catch(#'catch'{}) ->
+ true;
+is_c_catch(_) ->
+ false.
+
+
+%% @spec catch_body(Node::cerl()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract catch-expression.
+%%
+%% @see c_catch/1
+
+catch_body(Node) ->
+ Node#'catch'.body.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec to_records(Tree::cerl()) -> record(record_types())
+%%
+%% @doc Translates an abstract syntax tree to a corresponding explicit
+%% record representation. The records are defined in the file
+%% "<code>cerl.hrl</code>".
+%%
+%% <p>Note: Compound constant literals are always unfolded in the
+%% record representation.</p>
+%%
+%% @see type/1
+%% @see from_records/1
+
+to_records(Node) ->
+ A = get_ann(Node),
+ case type(Node) of
+ literal ->
+ lit_to_records(concrete(Node), A);
+ binary ->
+ #c_binary{anno = A,
+ segments =
+ list_to_records(binary_segments(Node))};
+ bitstr ->
+ #c_bitstr{anno = A,
+ val = to_records(bitstr_val(Node)),
+ size = to_records(bitstr_size(Node)),
+ unit = to_records(bitstr_unit(Node)),
+ type = to_records(bitstr_type(Node)),
+ flags = to_records(bitstr_flags(Node))};
+ cons ->
+ #c_cons{anno = A,
+ hd = to_records(cons_hd(Node)),
+ tl = to_records(cons_tl(Node))};
+ tuple ->
+ #c_tuple{anno = A,
+ es = list_to_records(tuple_es(Node))};
+ var ->
+ case is_c_fname(Node) of
+ true ->
+ #c_fname{anno = A,
+ id = fname_id(Node),
+ arity = fname_arity(Node)};
+ false ->
+ #c_var{anno = A, name = var_name(Node)}
+ end;
+ values ->
+ #c_values{anno = A,
+ es = list_to_records(values_es(Node))};
+ 'fun' ->
+ #c_fun{anno = A,
+ vars = list_to_records(fun_vars(Node)),
+ body = to_records(fun_body(Node))};
+ seq ->
+ #c_seq{anno = A,
+ arg = to_records(seq_arg(Node)),
+ body = to_records(seq_body(Node))};
+ 'let' ->
+ #c_let{anno = A,
+ vars = list_to_records(let_vars(Node)),
+ arg = to_records(let_arg(Node)),
+ body = to_records(let_body(Node))};
+ letrec ->
+ #c_letrec{anno = A,
+ defs = [#c_def{name = to_records(N),
+ val = to_records(F)}
+ || {N, F} <- letrec_defs(Node)],
+ body = to_records(letrec_body(Node))};
+ 'case' ->
+ #c_case{anno = A,
+ arg = to_records(case_arg(Node)),
+ clauses =
+ list_to_records(case_clauses(Node))};
+ clause ->
+ #c_clause{anno = A,
+ pats = list_to_records(clause_pats(Node)),
+ guard = to_records(clause_guard(Node)),
+ body = to_records(clause_body(Node))};
+ alias ->
+ #c_alias{anno = A,
+ var = to_records(alias_var(Node)),
+ pat = to_records(alias_pat(Node))};
+ 'receive' ->
+ #c_receive{anno = A,
+ clauses =
+ list_to_records(receive_clauses(Node)),
+ timeout =
+ to_records(receive_timeout(Node)),
+ action =
+ to_records(receive_action(Node))};
+ apply ->
+ #c_apply{anno = A,
+ op = to_records(apply_op(Node)),
+ args = list_to_records(apply_args(Node))};
+ call ->
+ #c_call{anno = A,
+ module = to_records(call_module(Node)),
+ name = to_records(call_name(Node)),
+ args = list_to_records(call_args(Node))};
+ primop ->
+ #c_primop{anno = A,
+ name = to_records(primop_name(Node)),
+ args = list_to_records(primop_args(Node))};
+ 'try' ->
+ #c_try{anno = A,
+ arg = to_records(try_arg(Node)),
+ vars = list_to_records(try_vars(Node)),
+ body = to_records(try_body(Node)),
+ evars = list_to_records(try_evars(Node)),
+ handler = to_records(try_handler(Node))};
+ 'catch' ->
+ #c_catch{anno = A,
+ body = to_records(catch_body(Node))};
+ module ->
+ #c_module{anno = A,
+ name = to_records(module_name(Node)),
+ exports = list_to_records(
+ module_exports(Node)),
+ attrs = [#c_def{name = to_records(K),
+ val = to_records(V)}
+ || {K, V} <- module_attrs(Node)],
+ defs = [#c_def{name = to_records(N),
+ val = to_records(F)}
+ || {N, F} <- module_defs(Node)]}
+ end.
+
+list_to_records([T | Ts]) ->
+ [to_records(T) | list_to_records(Ts)];
+list_to_records([]) ->
+ [].
+
+lit_to_records(V, A) when integer(V) ->
+ #c_int{anno = A, val = V};
+lit_to_records(V, A) when float(V) ->
+ #c_float{anno = A, val = V};
+lit_to_records(V, A) when atom(V) ->
+ #c_atom{anno = A, val = V};
+lit_to_records([H | T] = V, A) ->
+ case is_print_char_list(V) of
+ true ->
+ #c_string{anno = A, val = V};
+ false ->
+ #c_cons{anno = A,
+ hd = lit_to_records(H, []),
+ tl = lit_to_records(T, [])}
+ end;
+lit_to_records([], A) ->
+ #c_nil{anno = A};
+lit_to_records(V, A) when tuple(V) ->
+ #c_tuple{anno = A, es = lit_list_to_records(tuple_to_list(V))}.
+
+lit_list_to_records([T | Ts]) ->
+ [lit_to_records(T, []) | lit_list_to_records(Ts)];
+lit_list_to_records([]) ->
+ [].
+
+
+%% @spec from_records(Tree::record(record_types())) -> cerl()
+%%
+%%     record_types() = c_alias | c_apply | c_atom | c_binary |
+%%                      c_bitstr | c_call | c_case | c_catch | c_char |
+%%                      c_clause | c_cons | c_def | c_float | c_fname |
+%%                      c_fun | c_int | c_let | c_letrec | c_module |
+%%                      c_nil | c_primop | c_receive | c_seq |
+%%                      c_string | c_try | c_tuple | c_values | c_var
+%%
+%% @doc Translates an explicit record representation to a
+%% corresponding abstract syntax tree. The records are defined in the
+%% file "<code>cerl.hrl</code>".
+%%
+%% <p>Note: Compound constant literals are folded, discarding
+%% annotations on subtrees. There are no <code>c_def</code> nodes in
+%% the abstract representation; annotations on <code>c_def</code>
+%% records are discarded.</p>
+%%
+%% @see type/1
+%% @see to_records/1
+
+from_records(#c_int{val = V, anno = As}) ->
+ ann_c_int(As, V);
+from_records(#c_float{val = V, anno = As}) ->
+ ann_c_float(As, V);
+from_records(#c_atom{val = V, anno = As}) ->
+ ann_c_atom(As, V);
+from_records(#c_char{val = V, anno = As}) ->
+ ann_c_char(As, V);
+from_records(#c_string{val = V, anno = As}) ->
+ ann_c_string(As, V);
+from_records(#c_nil{anno = As}) ->
+ ann_c_nil(As);
+from_records(#c_binary{segments = Ss, anno = As}) ->
+ ann_c_binary(As, from_records_list(Ss));
+from_records(#c_bitstr{val = V, size = S, unit = U, type = T,
+ flags = Fs, anno = As}) ->
+ ann_c_bitstr(As, from_records(V), from_records(S), from_records(U),
+ from_records(T), from_records(Fs));
+from_records(#c_cons{hd = H, tl = T, anno = As}) ->
+ ann_c_cons(As, from_records(H), from_records(T));
+from_records(#c_tuple{es = Es, anno = As}) ->
+ ann_c_tuple(As, from_records_list(Es));
+from_records(#c_var{name = Name, anno = As}) ->
+ ann_c_var(As, Name);
+from_records(#c_fname{id = Id, arity = Arity, anno = As}) ->
+ ann_c_fname(As, Id, Arity);
+from_records(#c_values{es = Es, anno = As}) ->
+ ann_c_values(As, from_records_list(Es));
+from_records(#c_fun{vars = Vs, body = B, anno = As}) ->
+ ann_c_fun(As, from_records_list(Vs), from_records(B));
+from_records(#c_seq{arg = A, body = B, anno = As}) ->
+ ann_c_seq(As, from_records(A), from_records(B));
+from_records(#c_let{vars = Vs, arg = A, body = B, anno = As}) ->
+ ann_c_let(As, from_records_list(Vs), from_records(A),
+ from_records(B));
+from_records(#c_letrec{defs = Fs, body = B, anno = As}) ->
+ ann_c_letrec(As, [{from_records(N), from_records(F)}
+ || #c_def{name = N, val = F} <- Fs],
+ from_records(B));
+from_records(#c_case{arg = A, clauses = Cs, anno = As}) ->
+ ann_c_case(As, from_records(A), from_records_list(Cs));
+from_records(#c_clause{pats = Ps, guard = G, body = B, anno = As}) ->
+ ann_c_clause(As, from_records_list(Ps), from_records(G),
+ from_records(B));
+from_records(#c_alias{var = V, pat = P, anno = As}) ->
+ ann_c_alias(As, from_records(V), from_records(P));
+from_records(#c_receive{clauses = Cs, timeout = T, action = A,
+ anno = As}) ->
+ ann_c_receive(As, from_records_list(Cs), from_records(T),
+ from_records(A));
+from_records(#c_apply{op = Op, args = Es, anno = As}) ->
+ ann_c_apply(As, from_records(Op), from_records_list(Es));
+from_records(#c_call{module = M, name = N, args = Es, anno = As}) ->
+ ann_c_call(As, from_records(M), from_records(N),
+ from_records_list(Es));
+from_records(#c_primop{name = N, args = Es, anno = As}) ->
+ ann_c_primop(As, from_records(N), from_records_list(Es));
+from_records(#c_try{arg = E, vars = Vs, body = B,
+ evars = Evs, handler = H, anno = As}) ->
+ ann_c_try(As, from_records(E), from_records_list(Vs),
+ from_records(B), from_records_list(Evs), from_records(H));
+from_records(#c_catch{body = B, anno = As}) ->
+ ann_c_catch(As, from_records(B));
+from_records(#c_module{name = N, exports = Es, attrs = Ds, defs = Fs,
+ anno = As}) ->
+ ann_c_module(As, from_records(N),
+ from_records_list(Es),
+ [{from_records(K), from_records(V)}
+ || #c_def{name = K, val = V} <- Ds],
+ [{from_records(V), from_records(F)}
+ || #c_def{name = V, val = F} <- Fs]).
+
+from_records_list([T | Ts]) ->
+ [from_records(T) | from_records_list(Ts)];
+from_records_list([]) ->
+ [].
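+
+%% For illustration, a sketch of a round trip through the record
+%% representation, assuming c_tuple/1, c_atom/1 and c_int/1 defined
+%% elsewhere in this module:
+%%
+%%     Node = c_tuple([c_atom(a), c_int(1)]),
+%%     Node1 = from_records(to_records(Node))
+%%
+%% Node1 represents the same source text as Node, but since compound
+%% literals are unfolded by to_records/1 and folded again by
+%% from_records/1, the two trees need not have identical data
+%% representations.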
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec is_data(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% data constructor, otherwise <code>false</code>. Data constructors
+%% are cons cells, tuples, and atomic literals.
+%%
+%% @see data_type/1
+%% @see data_es/1
+%% @see data_arity/1
+
+is_data(#literal{}) ->
+ true;
+is_data(#cons{}) ->
+ true;
+is_data(#tuple{}) ->
+ true;
+is_data(_) ->
+ false.
+
+
+%% @spec data_type(Node::cerl()) -> dtype()
+%%
+%% dtype() = cons | tuple | {'atomic', Value}
+%% Value = integer() | float() | atom() | []
+%%
+%% @doc Returns a type descriptor for a data constructor
+%% node. (Cf. <code>is_data/1</code>.) This is mainly useful for
+%% comparing types and for constructing new nodes of the same type
+%% (cf. <code>make_data/2</code>). If <code>Node</code> represents an
+%% integer, floating-point number, atom or empty list, the result is
+%% <code>{'atomic', Value}</code>, where <code>Value</code> is the value
+%% of <code>concrete(Node)</code>, otherwise the result is either
+%% <code>cons</code> or <code>tuple</code>.
+%%
+%% <p>Type descriptors can be compared for equality or order (in the
+%% Erlang term order), but remember that floating-point values should
+%% in general never be tested for equality.</p>
+%%
+%% @see is_data/1
+%% @see make_data/2
+%% @see type/1
+%% @see concrete/1
+
+data_type(#literal{val = V}) ->
+ case V of
+ [_ | _] ->
+ cons;
+ _ when tuple(V) ->
+ tuple;
+ _ ->
+ {'atomic', V}
+ end;
+data_type(#cons{}) ->
+ cons;
+data_type(#tuple{}) ->
+ tuple.
+
+
+%% @spec data_es(Node::cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of subtrees of a data constructor node. If
+%% the arity of the constructor is zero, the result is the empty list.
+%%
+%% <p>Note: if <code>data_type(Node)</code> is <code>cons</code>, the
+%% number of subtrees is exactly two. If <code>data_type(Node)</code>
+%% is <code>{'atomic', Value}</code>, the number of subtrees is
+%% zero.</p>
+%%
+%% @see is_data/1
+%% @see data_type/1
+%% @see data_arity/1
+%% @see make_data/2
+
+data_es(#literal{val = V}) ->
+ case V of
+ [Head | Tail] ->
+ [#literal{val = Head}, #literal{val = Tail}];
+ _ when tuple(V) ->
+ make_lit_list(tuple_to_list(V));
+ _ ->
+ []
+ end;
+data_es(#cons{hd = H, tl = T}) ->
+ [H, T];
+data_es(#tuple{es = Es}) ->
+ Es.
+
+
+%% @spec data_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of subtrees of a data constructor
+%% node. This is equivalent to <code>length(data_es(Node))</code>, but
+%% potentially more efficient.
+%%
+%% @see is_data/1
+%% @see data_es/1
+
+data_arity(#literal{val = V}) ->
+ case V of
+ [_ | _] ->
+ 2;
+ _ when tuple(V) ->
+ size(V);
+ _ ->
+ 0
+ end;
+data_arity(#cons{}) ->
+ 2;
+data_arity(#tuple{es = Es}) ->
+ length(Es).
+
+
+%% @spec make_data(Type::dtype(), Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates a data constructor node with the specified type and
+%% subtrees. (Cf. <code>data_type/1</code>.) An exception is thrown
+%% if the length of <code>Elements</code> is invalid for the given
+%% <code>Type</code>; see <code>data_es/1</code> for arity constraints
+%% on constructor types.
+%%
+%% @see data_type/1
+%% @see data_es/1
+%% @see ann_make_data/3
+%% @see update_data/3
+%% @see make_data_skel/2
+
+make_data(CType, Es) ->
+ ann_make_data([], CType, Es).
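+
+%% For illustration, a sketch of the typical combined use of
+%% data_type/1, data_es/1 and update_data/3: a hypothetical helper
+%% that rebuilds a tree consisting only of data constructor nodes
+%% (cf. is_data/1), applying itself recursively to all subtrees:
+%%
+%%     rebuild(Node) ->
+%%         update_data(Node, data_type(Node),
+%%                     [rebuild(E) || E <- data_es(Node)]).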
+
+
+%% @spec ann_make_data(As::[term()], Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data/2
+
+ann_make_data(As, {'atomic', V}, []) -> #literal{val = V, ann = As};
+ann_make_data(As, cons, [H, T]) -> ann_c_cons(As, H, T);
+ann_make_data(As, tuple, Es) -> ann_c_tuple(As, Es).
+
+
+%% @spec update_data(Old::cerl(), Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data/2
+
+update_data(Node, CType, Es) ->
+ ann_make_data(get_ann(Node), CType, Es).
+
+
+%% @spec make_data_skel(Type::dtype(), Elements::[cerl()]) -> cerl()
+%%
+%% @doc Like <code>make_data/2</code>, but analogous to
+%% <code>c_tuple_skel/1</code> and <code>c_cons_skel/2</code>.
+%%
+%% @see ann_make_data_skel/3
+%% @see update_data_skel/3
+%% @see make_data/2
+%% @see c_tuple_skel/1
+%% @see c_cons_skel/2
+
+make_data_skel(CType, Es) ->
+ ann_make_data_skel([], CType, Es).
+
+
+%% @spec ann_make_data_skel(As::[term()], Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data_skel/2
+
+ann_make_data_skel(As, {'atomic', V}, []) -> #literal{val = V, ann = As};
+ann_make_data_skel(As, cons, [H, T]) -> ann_c_cons_skel(As, H, T);
+ann_make_data_skel(As, tuple, Es) -> ann_c_tuple_skel(As, Es).
+
+
+%% @spec update_data_skel(Old::cerl(), Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data_skel/2
+
+update_data_skel(Node, CType, Es) ->
+ ann_make_data_skel(get_ann(Node), CType, Es).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec subtrees(Node::cerl()) -> [[cerl()]]
+%%
+%% @doc Returns the grouped list of all subtrees of a node. If
+%% <code>Node</code> is a leaf node (cf. <code>is_leaf/1</code>), this
+%% is the empty list, otherwise the result is always a nonempty list,
+%% containing the lists of subtrees of <code>Node</code>, in
+%% left-to-right order as they occur in the printed program text, and
+%% grouped by category. Often, each group contains only a single
+%% subtree.
+%%
+%% <p>Depending on the type of <code>Node</code>, the size of some
+%% groups may be variable (e.g., the group consisting of all the
+%% elements of a tuple), while others always contain the same number
+%% of elements - usually exactly one (e.g., the group containing the
+%% argument expression of a case-expression). Note, however, that the
+%% exact structure of the returned list (for a given node type) should
+%% in general not be depended upon, since it might be subject to
+%% change without notice.</p>
+%%
+%% <p>The function <code>subtrees/1</code> and the constructor functions
+%% <code>make_tree/2</code> and <code>update_tree/2</code> can be a
+%% great help if one wants to traverse a syntax tree, visiting all its
+%% subtrees, but treat nodes of the tree in a uniform way in most or all
+%% cases. Using these functions makes this simple, and also ensures that
+%% your code is not overly sensitive to extensions of the syntax tree
+%% data type, because any node types not explicitly handled by your code
+%% can be left to a default case.</p>
+%%
+%% <p>For example:
+%% <pre>
+%% postorder(F, Tree) ->
+%% F(case subtrees(Tree) of
+%% [] -> Tree;
+%% List -> update_tree(Tree,
+%% [[postorder(F, Subtree)
+%% || Subtree &lt;- Group]
+%% || Group &lt;- List])
+%% end).
+%% </pre>
+%% maps the function <code>F</code> on <code>Tree</code> and all its
+%% subtrees, doing a post-order traversal of the syntax tree. (Note
+%% the use of <code>update_tree/2</code> to preserve annotations.) For
+%% a simple function like:
+%% <pre>
+%% f(Node) ->
+%% case type(Node) of
+%% atom -> atom("a_" ++ atom_name(Node));
+%% _ -> Node
+%% end.
+%% </pre>
+%% the call <code>postorder(fun f/1, Tree)</code> will yield a new
+%% representation of <code>Tree</code> in which all atom names have
+%% been extended with the prefix "a_", but nothing else (including
+%% annotations) has been changed.</p>
+%%
+%% @see is_leaf/1
+%% @see make_tree/2
+%% @see update_tree/2
+
+subtrees(T) ->
+ case is_leaf(T) of
+ true ->
+ [];
+ false ->
+ case type(T) of
+ values ->
+ [values_es(T)];
+ binary ->
+ [binary_segments(T)];
+ bitstr ->
+ [[bitstr_val(T)], [bitstr_size(T)],
+ [bitstr_unit(T)], [bitstr_type(T)],
+ [bitstr_flags(T)]];
+ cons ->
+ [[cons_hd(T)], [cons_tl(T)]];
+ tuple ->
+ [tuple_es(T)];
+ 'let' ->
+ [let_vars(T), [let_arg(T)], [let_body(T)]];
+ seq ->
+ [[seq_arg(T)], [seq_body(T)]];
+ apply ->
+ [[apply_op(T)], apply_args(T)];
+ call ->
+ [[call_module(T)], [call_name(T)],
+ call_args(T)];
+ primop ->
+ [[primop_name(T)], primop_args(T)];
+ 'case' ->
+ [[case_arg(T)], case_clauses(T)];
+ clause ->
+ [clause_pats(T), [clause_guard(T)],
+ [clause_body(T)]];
+ alias ->
+ [[alias_var(T)], [alias_pat(T)]];
+ 'fun' ->
+ [fun_vars(T), [fun_body(T)]];
+ 'receive' ->
+ [receive_clauses(T), [receive_timeout(T)],
+ [receive_action(T)]];
+ 'try' ->
+ [[try_arg(T)], try_vars(T), [try_body(T)],
+ try_evars(T), [try_handler(T)]];
+ 'catch' ->
+ [[catch_body(T)]];
+ letrec ->
+ Es = unfold_tuples(letrec_defs(T)),
+ [Es, [letrec_body(T)]];
+ module ->
+ As = unfold_tuples(module_attrs(T)),
+ Es = unfold_tuples(module_defs(T)),
+ [[module_name(T)], module_exports(T), As, Es]
+ end
+ end.
+
+
+%% @spec update_tree(Old::cerl(), Groups::[[cerl()]]) -> cerl()
+%%
+%% @doc Creates a syntax tree with the given subtrees, and the same
+%% type and annotations as the <code>Old</code> node. This is
+%% equivalent to <code>ann_make_tree(get_ann(Node), type(Node),
+%% Groups)</code>, but potentially more efficient.
+%%
+%% @see update_tree/3
+%% @see ann_make_tree/3
+%% @see get_ann/1
+%% @see type/1
+
+update_tree(Node, Gs) ->
+ ann_make_tree(get_ann(Node), type(Node), Gs).
+
+
+%% @spec update_tree(Old::cerl(), Type::atom(), Groups::[[cerl()]]) ->
+%% cerl()
+%%
+%% @doc Creates a syntax tree with the given type and subtrees, and
+%% the same annotations as the <code>Old</code> node. This is
+%% equivalent to <code>ann_make_tree(get_ann(Node), Type,
+%% Groups)</code>, but potentially more efficient.
+%%
+%% @see update_tree/2
+%% @see ann_make_tree/3
+%% @see get_ann/1
+
+update_tree(Node, Type, Gs) ->
+ ann_make_tree(get_ann(Node), Type, Gs).
+
+
+%% @spec make_tree(Type::atom(), Groups::[[cerl()]]) -> cerl()
+%%
+%% @doc Creates a syntax tree with the given type and subtrees.
+%% <code>Type</code> must be a node type name
+%% (cf. <code>type/1</code>) that does not denote a leaf node type
+%% (cf. <code>is_leaf/1</code>). <code>Groups</code> must be a
+%% <em>nonempty</em> list of groups of syntax trees, representing the
+%% subtrees of a node of the given type, in left-to-right order as
+%% they would occur in the printed program text, grouped by category
+%% as done by <code>subtrees/1</code>.
+%%
+%% <p>The result of <code>ann_make_tree(get_ann(Node), type(Node),
+%% subtrees(Node))</code> (cf. <code>update_tree/2</code>) represents
+%% the same source code text as the original <code>Node</code>,
+%% assuming that <code>subtrees(Node)</code> yields a nonempty
+%% list. However, it does not necessarily have the exact same data
+%% representation as <code>Node</code>.</p>
+%%
+%% @see ann_make_tree/3
+%% @see type/1
+%% @see is_leaf/1
+%% @see subtrees/1
+%% @see update_tree/2
+
+make_tree(Type, Gs) ->
+ ann_make_tree([], Type, Gs).
+
+
+%% @spec ann_make_tree(As::[term()], Type::atom(),
+%% Groups::[[cerl()]]) -> cerl()
+%%
+%% @doc Creates a syntax tree with the given annotations, type and
+%% subtrees. See <code>make_tree/2</code> for details.
+%%
+%% @see make_tree/2
+
+ann_make_tree(As, values, [Es]) -> ann_c_values(As, Es);
+ann_make_tree(As, binary, [Ss]) -> ann_c_binary(As, Ss);
+ann_make_tree(As, bitstr, [[V],[S],[U],[T],[Fs]]) ->
+ ann_c_bitstr(As, V, S, U, T, Fs);
+ann_make_tree(As, cons, [[H], [T]]) -> ann_c_cons(As, H, T);
+ann_make_tree(As, tuple, [Es]) -> ann_c_tuple(As, Es);
+ann_make_tree(As, 'let', [Vs, [A], [B]]) -> ann_c_let(As, Vs, A, B);
+ann_make_tree(As, seq, [[A], [B]]) -> ann_c_seq(As, A, B);
+ann_make_tree(As, apply, [[Op], Es]) -> ann_c_apply(As, Op, Es);
+ann_make_tree(As, call, [[M], [N], Es]) -> ann_c_call(As, M, N, Es);
+ann_make_tree(As, primop, [[N], Es]) -> ann_c_primop(As, N, Es);
+ann_make_tree(As, 'case', [[A], Cs]) -> ann_c_case(As, A, Cs);
+ann_make_tree(As, clause, [Ps, [G], [B]]) -> ann_c_clause(As, Ps, G, B);
+ann_make_tree(As, alias, [[V], [P]]) -> ann_c_alias(As, V, P);
+ann_make_tree(As, 'fun', [Vs, [B]]) -> ann_c_fun(As, Vs, B);
+ann_make_tree(As, 'receive', [Cs, [T], [A]]) ->
+ ann_c_receive(As, Cs, T, A);
+ann_make_tree(As, 'try', [[E], Vs, [B], Evs, [H]]) ->
+ ann_c_try(As, E, Vs, B, Evs, H);
+ann_make_tree(As, 'catch', [[B]]) -> ann_c_catch(As, B);
+ann_make_tree(As, letrec, [Es, [B]]) ->
+ ann_c_letrec(As, fold_tuples(Es), B);
+ann_make_tree(As, module, [[N], Xs, Es, Ds]) ->
+ ann_c_module(As, N, Xs, fold_tuples(Es), fold_tuples(Ds)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec meta(Tree::cerl()) -> cerl()
+%%
+%% @doc Creates a meta-representation of a syntax tree. The result
+%% represents an Erlang expression "<code><em>MetaTree</em></code>"
+%% which, if evaluated, will yield a new syntax tree representing the
+%% same source code text as <code>Tree</code> (although the actual
+%% data representation may be different). The expression represented
+%% by <code>MetaTree</code> is <em>implementation independent</em>
+%% with regard to the data structures used by the abstract syntax tree
+%% implementation.
+%%
+%% <p>Any node in <code>Tree</code> whose node type is
+%% <code>var</code> (cf. <code>type/1</code>), and whose list of
+%% annotations (cf. <code>get_ann/1</code>) contains the atom
+%% <code>meta_var</code>, will remain unchanged in the resulting tree,
+%% except that exactly one occurrence of <code>meta_var</code> is
+%% removed from its annotation list.</p>
+%%
+%% <p>The main use of the function <code>meta/1</code> is to transform
+%% a data structure <code>Tree</code>, which represents a piece of
+%% program code, into a form that is <em>representation independent
+%% when printed</em>. E.g., suppose <code>Tree</code> represents a
+%% variable named "V". Then (assuming a function <code>print/1</code>
+%% for printing syntax trees), evaluating
+%% <code>print(abstract(Tree))</code> - simply using
+%% <code>abstract/1</code> to map the actual data structure onto a
+%% syntax tree representation - would output a string that might look
+%% something like "<code>{var, ..., 'V'}</code>", which is obviously
+%% dependent on the implementation of the abstract syntax trees. This
+%% could e.g. be useful for caching a syntax tree in a file. However,
+%% in some situations, such as in a program generator generator (note
+%% the two occurrences of "generator": a program that generates
+%% program generators), it may be unacceptable. Using
+%% <code>print(meta(Tree))</code> instead would output a
+%% <em>representation independent</em> syntax tree generating
+%% expression; in the above case, something like
+%% "<code>cerl:c_var('V')</code>".</p>
+%%
+%% <p>The implementation tries to generate compact code with respect
+%% to literals and lists.</p>
+%%
+%% @see abstract/1
+%% @see type/1
+%% @see get_ann/1
+
+meta(Node) ->
+ %% First of all we check for metavariables:
+ case type(Node) of
+ var ->
+ case lists:member(meta_var, get_ann(Node)) of
+ false ->
+ meta_0(var, Node);
+ true ->
+ %% A meta-variable: remove the first found
+ %% 'meta_var' annotation, but otherwise leave
+ %% the node unchanged.
+ set_ann(Node, lists:delete(meta_var, get_ann(Node)))
+ end;
+ Type ->
+ meta_0(Type, Node)
+ end.
+
+meta_0(Type, Node) ->
+ case get_ann(Node) of
+ [] ->
+ meta_1(Type, Node);
+ As ->
+ meta_call(set_ann, [meta_1(Type, Node), abstract(As)])
+ end.
+
+meta_1(literal, Node) ->
+ %% We handle atomic literals separately, to get a bit
+ %% more compact code. For the rest, we use 'abstract'.
+ case concrete(Node) of
+ V when atom(V) ->
+ meta_call(c_atom, [Node]);
+ V when integer(V) ->
+ meta_call(c_int, [Node]);
+ V when float(V) ->
+ meta_call(c_float, [Node]);
+ [] ->
+ meta_call(c_nil, []);
+ _ ->
+ meta_call(abstract, [Node])
+ end;
+meta_1(var, Node) ->
+ %% A normal variable or function name.
+ meta_call(c_var, [abstract(var_name(Node))]);
+meta_1(values, Node) ->
+ meta_call(c_values,
+ [make_list(meta_list(values_es(Node)))]);
+meta_1(binary, Node) ->
+ meta_call(c_binary,
+ [make_list(meta_list(binary_segments(Node)))]);
+meta_1(bitstr, Node) ->
+ meta_call(c_bitstr,
+ [meta(bitstr_val(Node)),
+ meta(bitstr_size(Node)),
+ meta(bitstr_unit(Node)),
+ meta(bitstr_type(Node)),
+ meta(bitstr_flags(Node))]);
+meta_1(cons, Node) ->
+    %% The list is split up if some sublist has annotations. If we
+    %% get exactly one element, we generate a 'c_cons' call instead
+    %% of 'make_list' to reconstruct the node.
+ case split_list(Node) of
+ {[H], none} ->
+ meta_call(c_cons, [meta(H), meta(c_nil())]);
+ {[H], Node1} ->
+ meta_call(c_cons, [meta(H), meta(Node1)]);
+ {L, none} ->
+ meta_call(make_list, [make_list(meta_list(L))]);
+ {L, Node1} ->
+ meta_call(make_list,
+ [make_list(meta_list(L)), meta(Node1)])
+ end;
+meta_1(tuple, Node) ->
+ meta_call(c_tuple,
+ [make_list(meta_list(tuple_es(Node)))]);
+meta_1('let', Node) ->
+ meta_call(c_let,
+ [make_list(meta_list(let_vars(Node))),
+ meta(let_arg(Node)), meta(let_body(Node))]);
+meta_1(seq, Node) ->
+ meta_call(c_seq,
+ [meta(seq_arg(Node)), meta(seq_body(Node))]);
+meta_1(apply, Node) ->
+ meta_call(c_apply,
+ [meta(apply_op(Node)),
+ make_list(meta_list(apply_args(Node)))]);
+meta_1(call, Node) ->
+ meta_call(c_call,
+ [meta(call_module(Node)), meta(call_name(Node)),
+ make_list(meta_list(call_args(Node)))]);
+meta_1(primop, Node) ->
+ meta_call(c_primop,
+ [meta(primop_name(Node)),
+ make_list(meta_list(primop_args(Node)))]);
+meta_1('case', Node) ->
+ meta_call(c_case,
+ [meta(case_arg(Node)),
+ make_list(meta_list(case_clauses(Node)))]);
+meta_1(clause, Node) ->
+ meta_call(c_clause,
+ [make_list(meta_list(clause_pats(Node))),
+ meta(clause_guard(Node)),
+ meta(clause_body(Node))]);
+meta_1(alias, Node) ->
+ meta_call(c_alias,
+ [meta(alias_var(Node)), meta(alias_pat(Node))]);
+meta_1('fun', Node) ->
+ meta_call(c_fun,
+ [make_list(meta_list(fun_vars(Node))),
+ meta(fun_body(Node))]);
+meta_1('receive', Node) ->
+ meta_call(c_receive,
+ [make_list(meta_list(receive_clauses(Node))),
+ meta(receive_timeout(Node)),
+ meta(receive_action(Node))]);
+meta_1('try', Node) ->
+ meta_call(c_try,
+ [meta(try_arg(Node)),
+ make_list(meta_list(try_vars(Node))),
+ meta(try_body(Node)),
+ make_list(meta_list(try_evars(Node))),
+ meta(try_handler(Node))]);
+meta_1('catch', Node) ->
+ meta_call(c_catch, [meta(catch_body(Node))]);
+meta_1(letrec, Node) ->
+ meta_call(c_letrec,
+ [make_list([c_tuple([meta(N), meta(F)])
+ || {N, F} <- letrec_defs(Node)]),
+ meta(letrec_body(Node))]);
+meta_1(module, Node) ->
+ meta_call(c_module,
+ [meta(module_name(Node)),
+ make_list(meta_list(module_exports(Node))),
+ make_list([c_tuple([meta(A), meta(V)])
+ || {A, V} <- module_attrs(Node)]),
+ make_list([c_tuple([meta(N), meta(F)])
+ || {N, F} <- module_defs(Node)])]).
+
+meta_call(F, As) ->
+ c_call(c_atom(?MODULE), c_atom(F), As).
+
+meta_list([T | Ts]) ->
+ [meta(T) | meta_list(Ts)];
+meta_list([]) ->
+ [].
+
+split_list(Node) ->
+ split_list(set_ann(Node, []), []).
+
+split_list(Node, L) ->
+ A = get_ann(Node),
+ case type(Node) of
+ cons when A == [] ->
+ split_list(cons_tl(Node), [cons_hd(Node) | L]);
+ nil when A == [] ->
+ {lists:reverse(L), none};
+ _ ->
+ {lists:reverse(L), Node}
+ end.
+
+
+%% ---------------------------------------------------------------------
+
+%% General utilities
+
+is_lit_list([#literal{} | Es]) ->
+ is_lit_list(Es);
+is_lit_list([_ | _]) ->
+ false;
+is_lit_list([]) ->
+ true.
+
+lit_list_vals([#literal{val = V} | Es]) ->
+ [V | lit_list_vals(Es)];
+lit_list_vals([]) ->
+ [].
+
+make_lit_list([V | Vs]) ->
+ [#literal{val = V} | make_lit_list(Vs)];
+make_lit_list([]) ->
+ [].
+
+%% The following tests are the same as done by 'io_lib:char_list' and
+%% 'io_lib:printable_list', respectively, but for a single character.
+
+is_char_value(V) when V >= $\000, V =< $\377 -> true;
+is_char_value(_) -> false.
+
+is_print_char_value(V) when V >= $\040, V =< $\176 -> true;
+is_print_char_value(V) when V >= $\240, V =< $\377 -> true;
+is_print_char_value(V) when V =:= $\b -> true;
+is_print_char_value(V) when V =:= $\d -> true;
+is_print_char_value(V) when V =:= $\e -> true;
+is_print_char_value(V) when V =:= $\f -> true;
+is_print_char_value(V) when V =:= $\n -> true;
+is_print_char_value(V) when V =:= $\r -> true;
+is_print_char_value(V) when V =:= $\s -> true;
+is_print_char_value(V) when V =:= $\t -> true;
+is_print_char_value(V) when V =:= $\v -> true;
+is_print_char_value(V) when V =:= $\" -> true;
+is_print_char_value(V) when V =:= $\' -> true;
+is_print_char_value(V) when V =:= $\\ -> true;
+is_print_char_value(_) -> false.
+
+is_char_list([V | Vs]) when integer(V) ->
+ case is_char_value(V) of
+ true ->
+ is_char_list(Vs);
+ false ->
+ false
+ end;
+is_char_list([]) ->
+ true;
+is_char_list(_) ->
+ false.
+
+is_print_char_list([V | Vs]) when integer(V) ->
+ case is_print_char_value(V) of
+ true ->
+ is_print_char_list(Vs);
+ false ->
+ false
+ end;
+is_print_char_list([]) ->
+ true;
+is_print_char_list(_) ->
+ false.
+
+unfold_tuples([{X, Y} | Ps]) ->
+ [X, Y | unfold_tuples(Ps)];
+unfold_tuples([]) ->
+ [].
+
+fold_tuples([X, Y | Es]) ->
+ [{X, Y} | fold_tuples(Es)];
+fold_tuples([]) ->
+ [].
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_clauses.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_clauses.erl
new file mode 100644
index 0000000000..f207178f13
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_clauses.erl
@@ -0,0 +1,409 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Richard Carlsson.
+%% Copyright (C) 1999-2002 Richard Carlsson.
+%% Portions created by Ericsson are Copyright 2001, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: cerl_clauses.erl,v 1.2 2009/09/17 09:46:19 kostis Exp $
+
+%% @doc Utility functions for Core Erlang case/receive clauses.
+%%
+%% <p>Syntax trees are defined in the module <a
+%% href="cerl"><code>cerl</code></a>.</p>
+%%
+%% @type cerl() = cerl:cerl()
+
+-module(cerl_clauses).
+
+-export([any_catchall/1, eval_guard/1, is_catchall/1, match/2,
+ match_list/2, reduce/1, reduce/2]).
+
+-import(cerl, [alias_pat/1, alias_var/1, data_arity/1, data_es/1,
+ data_type/1, clause_guard/1, clause_pats/1, concrete/1,
+ is_data/1, is_c_var/1, let_body/1, letrec_body/1,
+ seq_body/1, try_arg/1, type/1, values_es/1]).
+
+-import(lists, [reverse/1]).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec is_catchall(Clause::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if an abstract clause is a
+%% catch-all, otherwise <code>false</code>. A clause is a catch-all if
+%% all its patterns are variables, and its guard expression always
+%% evaluates to <code>true</code>; cf. <code>eval_guard/1</code>.
+%%
+%% <p>Note: <code>Clause</code> must have type
+%% <code>clause</code>.</p>
+%%
+%% @see eval_guard/1
+%% @see any_catchall/1
+
+is_catchall(C) ->
+ case all_vars(clause_pats(C)) of
+ true ->
+ case eval_guard(clause_guard(C)) of
+ {value, true} ->
+ true;
+ _ ->
+ false
+ end;
+ false ->
+ false
+ end.
+
+all_vars([C | Cs]) ->
+ case is_c_var(C) of
+ true ->
+ all_vars(Cs);
+ false ->
+ false
+ end;
+all_vars([]) ->
+ true.
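+
+%% For illustration (a sketch using constructors from the cerl
+%% module): the clause "<X, Y> when 'true' -> 'ok'" is a catch-all,
+%% since both patterns are variables and the default guard is the
+%% literal 'true', while "<{'ok', X}> when 'true' -> X" is not:
+%%
+%%     CatchAll = cerl:c_clause([cerl:c_var('X'), cerl:c_var('Y')],
+%%                              cerl:c_atom(ok)),
+%%     true = is_catchall(CatchAll)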
+
+
+%% @spec any_catchall(Clauses::[cerl()]) -> boolean()
+%%
+%% @doc Returns <code>true</code> if any of the abstract clauses in
+%% the list is a catch-all, otherwise <code>false</code>. See
+%% <code>is_catchall/1</code> for details.
+%%
+%% <p>Note: each node in <code>Clauses</code> must have type
+%% <code>clause</code>.</p>
+%%
+%% @see is_catchall/1
+
+any_catchall([C | Cs]) ->
+ case is_catchall(C) of
+ true ->
+ true;
+ false ->
+ any_catchall(Cs)
+ end;
+any_catchall([]) ->
+ false.
+
+
+%% @spec eval_guard(Expr::cerl()) -> none | {value, term()}
+%%
+%% @doc Tries to reduce a guard expression to a single constant value,
+%% if possible. The returned value is <code>{value, Term}</code> if the
+%% guard expression <code>Expr</code> always yields the constant value
+%% <code>Term</code>, and is otherwise <code>none</code>.
+%%
+%% <p>Note that although guard expressions should only yield boolean
+%% values, this function does not guarantee that <code>Term</code> is
+%% either <code>true</code> or <code>false</code>. Also note that only
+%% simple constructs like let-expressions are examined recursively;
+%% general constant folding is not performed.</p>
+%%
+%% @see is_catchall/1
+
+%% This function could possibly be improved further, but constant
+%% folding should in general be performed elsewhere.
+
+eval_guard(E) ->
+ case type(E) of
+ literal ->
+ {value, concrete(E)};
+ values ->
+ case values_es(E) of
+ [E1] ->
+ eval_guard(E1);
+ _ ->
+ none
+ end;
+ 'try' ->
+ eval_guard(try_arg(E));
+ seq ->
+ eval_guard(seq_body(E));
+ 'let' ->
+ eval_guard(let_body(E));
+ 'letrec' ->
+ eval_guard(letrec_body(E));
+ _ ->
+ none
+ end.
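+
+%% For illustration: eval_guard(cerl:c_atom(true)) is {value, true},
+%% eval_guard of a let-expression whose body is the literal 'false'
+%% is {value, false}, and eval_guard of a call expression such as
+%% "call 'erlang':'>'(X, 0)" is 'none', since its value is not known
+%% statically.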
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec reduce(Clauses) -> {true, {Clauses, Bindings}}
+%% | {false, Clauses}
+%%
+%% @equiv reduce(Cs, [])
+
+reduce(Cs) ->
+ reduce(Cs, []).
+
+%% @spec reduce(Clauses::[Clause], Exprs::[Expr]) ->
+%% {true, {Clause, Bindings}}
+%% | {false, [Clause]}
+%%
+%% Clause = cerl()
+%% Expr = any | cerl()
+%% Bindings = [{cerl(), cerl()}]
+%%
+%% @doc Selects a single clause, if possible, or otherwise reduces the
+%% list of selectable clauses. The input is a list <code>Clauses</code>
+%% of abstract clauses (i.e., syntax trees of type <code>clause</code>),
+%% and a list of switch expressions <code>Exprs</code>. The function
+%% tries to uniquely select a single clause or discard unselectable
+%% clauses, with respect to the switch expressions. All abstract clauses
+%% in the list must have the same number of patterns. If
+%% <code>Exprs</code> is not the empty list, it must have the same
+%% length as the number of patterns in each clause; see
+%% <code>match_list/2</code> for details.
+%%
+%% <p>A clause can only be selected if its guard expression always
+%% yields the atom <code>true</code>, and a clause whose guard
+%% expression always yields the atom <code>false</code> can never be
+%% selected. Other guard expressions are considered to have unknown
+%% value; cf. <code>eval_guard/1</code>.</p>
+%%
+%% <p>If a particular clause can be selected, the function returns
+%% <code>{true, {Clause, Bindings}}</code>, where <code>Clause</code> is
+%% the selected clause and <code>Bindings</code> is a list of pairs
+%% <code>{Var, SubExpr}</code> associating the variables occurring in
+%% the patterns of <code>Clause</code> with the corresponding
+%% subexpressions in <code>Exprs</code>. The list of bindings is given
+%% in innermost-first order; see the <code>match/2</code> function for
+%% details.</p>
+%%
+%% <p>If no clause could be definitely selected, the function returns
+%% <code>{false, NewClauses}</code>, where <code>NewClauses</code> is
+%% the list of entries in <code>Clauses</code> that remain after
+%% eliminating unselectable clauses, preserving the relative order.</p>
+%%
+%% @see eval_guard/1
+%% @see match/2
+%% @see match_list/2
+
+reduce(Cs, Es) ->
+ reduce(Cs, Es, []).
+
+reduce([C | Cs], Es, Cs1) ->
+ Ps = clause_pats(C),
+ case match_list(Ps, Es) of
+ none ->
+ %% Here, we know that the current clause cannot possibly be
+ %% selected, so we drop it and visit the rest.
+ reduce(Cs, Es, Cs1);
+ {false, _} ->
+ %% We are not sure if this clause might be selected, so we
+ %% save it and visit the rest.
+ reduce(Cs, Es, [C | Cs1]);
+ {true, Bs} ->
+ case eval_guard(clause_guard(C)) of
+ {value, true} when Cs1 == [] ->
+ %% We have a definite match - we return the residual
+ %% expression and signal that a selection has been
+ %% made. All other clauses are dropped.
+ {true, {C, Bs}};
+ {value, true} ->
+ %% Unless one of the previous clauses is selected,
+ %% this clause will definitely be, so we can drop
+ %% the rest.
+ {false, reverse([C | Cs1])};
+ {value, false} ->
+ %% This clause can never be selected, since its
+ %% guard is never 'true', so we drop it.
+ reduce(Cs, Es, Cs1);
+ _ ->
+ %% We are not sure if this clause might be selected
+ %% (or might even cause a crash), so we save it and
+ %% visit the rest.
+ reduce(Cs, Es, [C | Cs1])
+ end
+ end;
+reduce([], _, Cs) ->
+ %% All clauses visited, without a complete match. Signal "not
+ %% reduced" and return the saved clauses, in the correct order.
+ {false, reverse(Cs)}.
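+
+%% For illustration, a rough sketch of clause reduction (clauses built
+%% with `cerl:c_clause/2' get the guard 'true'): the switch value 'b'
+%% rules out the first clause and definitely selects the second.
+%%%
+%%%   C1 = cerl:c_clause([cerl:c_atom(a)], cerl:c_atom(yes)),
+%%%   C2 = cerl:c_clause([cerl:c_var('X')], cerl:c_atom(no)),
+%%%   cerl_clauses:reduce([C1, C2], [cerl:c_atom(b)]).
+%%%       %% expected: {true, {C2, Bs}} with Bs binding the variable
+%%%       %% 'X' to the switch expression 'b'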
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec match(Pattern::cerl(), Expr) ->
+%% none | {true, Bindings} | {false, Bindings}
+%%
+%% Expr = any | cerl()
+%% Bindings = [{cerl(), Expr}]
+%%
+%% @doc Matches a pattern against an expression. The returned value is
+%% <code>none</code> if a match is impossible, <code>{true,
+%% Bindings}</code> if <code>Pattern</code> definitely matches
+%% <code>Expr</code>, and <code>{false, Bindings}</code> if a match is
+%% not definite, but cannot be excluded. <code>Bindings</code> is then
+%% a list of pairs <code>{Var, SubExpr}</code>, associating each
+%% variable in the pattern with either the corresponding subexpression
+%% of <code>Expr</code>, or with the atom <code>any</code> if no
+%% matching subexpression exists. (Recall that variables may not be
+%% repeated in a Core Erlang pattern.) The list of bindings is given
+%% in innermost-first order; this should only be of interest if
+%% <code>Pattern</code> contains one or more alias patterns. If the
+%% returned value is <code>{true, []}</code>, it implies that the
+%% pattern and the expression are syntactically identical.
+%%
+%% <p>Instead of a syntax tree, the atom <code>any</code> can be
+%% passed for <code>Expr</code> (or, more generally, be used for any
+%% subtree of <code>Expr</code>, insofar as the abstract syntax tree
+%% implementation allows it); this means that it cannot be decided
+%% whether the pattern will match or not, and the corresponding
+%% variable bindings will all map to <code>any</code>. The typical use
+%% is for producing bindings for <code>receive</code> clauses.</p>
+%%
+%% <p>Note: Binary-syntax patterns are never structurally matched
+%% against binary-syntax expressions by this function.</p>
+%%
+%% <p>Examples:
+%% <ul>
+%% <li>Matching a pattern "<code>{X, Y}</code>" against the
+%% expression "<code>{foo, f(Z)}</code>" yields <code>{true,
+%% Bindings}</code> where <code>Bindings</code> associates
+%% "<code>X</code>" with the subtree "<code>foo</code>" and
+%% "<code>Y</code>" with the subtree "<code>f(Z)</code>".</li>
+%%
+%% <li>Matching pattern "<code>{X, {bar, Y}}</code>" against
+%% expression "<code>{foo, f(Z)}</code>" yields <code>{false,
+%% Bindings}</code> where <code>Bindings</code> associates
+%% "<code>X</code>" with the subtree "<code>foo</code>" and
+%% "<code>Y</code>" with <code>any</code> (because it is not known
+%% if "<code>{bar, Y}</code>" might match the run-time value of
+%% "<code>f(Z)</code>" or not).</li>
+%%
+%% <li>Matching pattern "<code>{foo, bar}</code>" against expression
+%% "<code>{foo, f()}</code>" yields <code>{false, []}</code>,
+%% telling us that there might be a match, but we cannot deduce any
+%% bindings.</li>
+%%
+%% <li>Matching <code>{foo, X = {bar, Y}}</code> against expression
+%% "<code>{foo, {bar, baz}}</code>" yields <code>{true,
+%% Bindings}</code> where <code>Bindings</code> associates
+%% "<code>Y</code>" with "<code>baz</code>", and "<code>X</code>"
+%% with "<code>{bar, baz}</code>".</li>
+%%
+%% <li>Matching a pattern "<code>{X, Y}</code>" against
+%% <code>any</code> yields <code>{false, Bindings}</code> where
+%% <code>Bindings</code> associates both "<code>X</code>" and
+%% "<code>Y</code>" with <code>any</code>.</li>
+%% </ul></p>
+
+match(P, E) ->
+ match(P, E, []).
+
+match(P, E, Bs) ->
+ case type(P) of
+ var ->
+ %% Variables always match, since they cannot have repeated
+ %% occurrences in a pattern.
+ {true, [{P, E} | Bs]};
+ alias ->
+ %% All variables in P1 will be listed before the alias
+ %% variable in the result.
+ match(alias_pat(P), E, [{alias_var(P), E} | Bs]);
+ binary ->
+ %% The most we can do is to say "definitely no match" if a
+ %% binary pattern is matched against non-binary data.
+ if E == any ->
+ {false, Bs};
+ true ->
+ case is_data(E) of
+ true ->
+ none;
+ false ->
+ {false, Bs}
+ end
+ end;
+ _ ->
+ match_1(P, E, Bs)
+ end.
+
+match_1(P, E, Bs) ->
+ case is_data(P) of
+ true when E == any ->
+ %% If we don't know the structure of the value of E at this
+ %% point, we just match the subpatterns against 'any', and
+ %% make sure the result is a "maybe".
+ Ps = data_es(P),
+ Es = lists:duplicate(length(Ps), any),
+ case match_list(Ps, Es, Bs) of
+ {_, Bs1} ->
+ {false, Bs1};
+ none ->
+ none
+ end;
+ true ->
+ %% Test if the expression represents a constructor
+ case is_data(E) of
+ true ->
+ T1 = {data_type(E), data_arity(E)},
+ T2 = {data_type(P), data_arity(P)},
+ %% Note that we must test for exact equality.
+ if T1 =:= T2 ->
+ match_list(data_es(P), data_es(E), Bs);
+ true ->
+ none
+ end;
+ false ->
+ %% We don't know the run-time structure of E, and P
+ %% is not a variable or an alias pattern, so we
+ %% match against 'any' instead.
+ match_1(P, any, Bs)
+ end;
+ false ->
+ %% Strange pattern - give up, but don't say "no match".
+ {false, Bs}
+ end.
+
+
+%% @spec match_list(Patterns::[cerl()], Exprs::[Expr]) ->
+%% none | {true, Bindings} | {false, Bindings}
+%%
+%% Expr = any | cerl()
+%% Bindings = [{cerl(), cerl()}]
+%%
+%% @doc Like <code>match/2</code>, but matching a sequence of patterns
+%% against a sequence of expressions. Passing an empty list for
+%% <code>Exprs</code> is equivalent to passing a list of
+%% <code>any</code> atoms of the same length as <code>Patterns</code>.
+%%
+%% @see match/2
+
+match_list([], []) ->
+ {true, []}; % no patterns always match
+match_list(Ps, []) ->
+ match_list(Ps, lists:duplicate(length(Ps), any), []);
+match_list(Ps, Es) ->
+ match_list(Ps, Es, []).
+
+match_list([P | Ps], [E | Es], Bs) ->
+ case match(P, E, Bs) of
+ {true, Bs1} ->
+ match_list(Ps, Es, Bs1);
+ {false, Bs1} ->
+ %% Make sure "maybe" is preserved
+ case match_list(Ps, Es, Bs1) of
+ {_, Bs2} ->
+ {false, Bs2};
+ none ->
+ none
+ end;
+ none ->
+ none
+ end;
+match_list([], [], Bs) ->
+ {true, Bs}.
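+
+%% For illustration, a rough sketch of matching a pattern sequence
+%% against unknown values (passing `[]' for the expressions): every
+%% pattern variable is bound to 'any', and the result is a "maybe"
+%% rather than a definite match.
+%%%
+%%%   Ps = [cerl:c_var('X'),
+%%%         cerl:c_tuple([cerl:c_atom(ok), cerl:c_var('Y')])],
+%%%   cerl_clauses:match_list(Ps, []).
+%%%       %% expected: {false, Bs} with both 'X' and 'Y' mapped to 'any'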
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_inline.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_inline.erl
new file mode 100644
index 0000000000..e040904a19
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_inline.erl
@@ -0,0 +1,2762 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Richard Carlsson.
+%% Copyright (C) 1999-2002 Richard Carlsson.
+%% Portions created by Ericsson are Copyright 2001, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: cerl_inline.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
+%%
+%% Core Erlang inliner.
+
+%% =====================================================================
+%%
+%% This is an implementation of the algorithm by Waddell and Dybvig
+%% ("Fast and Effective Procedure Inlining", International Static
+%% Analysis Symposium 1997), adapted to the Core Erlang language.
+%%
+%% Instead of always renaming variables and function variables, this
+%% implementation uses the "no-shadowing strategy" of Peyton Jones and
+%% Marlow ("Secrets of the Glasgow Haskell Compiler Inliner", 1999).
+%%
+%% =====================================================================
+
+%% TODO: inline single-source-reference operands without size limit.
+
+-module(cerl_inline).
+
+-export([core_transform/2, transform/1, transform/2]).
+
+-import(cerl, [abstract/1, alias_pat/1, alias_var/1, apply_args/1,
+ apply_op/1, atom_name/1, atom_val/1, bitstr_val/1,
+ bitstr_size/1, bitstr_unit/1, bitstr_type/1,
+ bitstr_flags/1, binary_segments/1, update_c_alias/3,
+ update_c_apply/3, update_c_binary/2, update_c_bitstr/6,
+ update_c_call/4, update_c_case/3, update_c_catch/2,
+ update_c_clause/4, c_fun/2, c_int/1, c_let/3,
+ update_c_let/4, update_c_letrec/3, update_c_module/5,
+ update_c_primop/3, update_c_receive/4, update_c_seq/3,
+ c_seq/2, update_c_try/6, c_tuple/1, update_c_values/2,
+ c_values/1, c_var/1, call_args/1, call_module/1,
+ call_name/1, case_arity/1, case_arg/1, case_clauses/1,
+ catch_body/1, clause_body/1, clause_guard/1,
+ clause_pats/1, clause_vars/1, concrete/1, cons_hd/1,
+ cons_tl/1, data_arity/1, data_es/1, data_type/1,
+ fun_body/1, fun_vars/1, get_ann/1, int_val/1,
+ is_c_atom/1, is_c_cons/1, is_c_fun/1, is_c_int/1,
+ is_c_list/1, is_c_seq/1, is_c_tuple/1, is_c_var/1,
+ is_data/1, is_literal/1, is_literal_term/1, let_arg/1,
+ let_body/1, let_vars/1, letrec_body/1, letrec_defs/1,
+ list_length/1, list_elements/1, update_data/3,
+ make_list/1, make_data_skel/2, module_attrs/1,
+ module_defs/1, module_exports/1, module_name/1,
+ primop_args/1, primop_name/1, receive_action/1,
+ receive_clauses/1, receive_timeout/1, seq_arg/1,
+ seq_body/1, set_ann/2, try_arg/1, try_body/1, try_vars/1,
+ try_evars/1, try_handler/1, tuple_es/1, tuple_arity/1,
+ type/1, values_es/1, var_name/1]).
+
+-import(lists, [foldl/3, foldr/3, mapfoldl/3, reverse/1]).
+
+%%
+%% Constants
+%%
+
+debug_runtime() -> false.
+debug_counters() -> false.
+
+%% Normal execution times for inlining are between 0.1 and 0.3 seconds
+%% (on the author's current equipment). The default effort limit of 150
+%% is high enough that most normal programs never hit the limit even
+%% once, and for difficult programs, it generally keeps the execution
+%% times below 2-5 seconds. Using an effort counter of 1000 will thus
+%% have no further effect on most programs, but some programs may take
+%% as much as 10 seconds or more. Effort counts larger than 2500 have
+%% never been observed even on very ill-conditioned programs.
+%%
+%% Size limits between 6 and 18 tend to actually shrink the code,
+%% because of the simplifications made possible by inlining. A limit of
+%% 16 seems to be optimal for this purpose, often shrinking the
+%% executable code by up to 10%. Size limits between 18 and 30 generally
+%% give the same code size as if no inlining was done (i.e., code
+%% duplication balances out the simplifications at these levels). A size
+%% limit between 1 and 5 tends to inline small functions and propagate
+%% constants, but does not cause many simplifications to be done, so the
+%% net effect will be a slight increase in code size. For size limits
+%% above 30, the executable code size tends to increase with about 10%
+%% per 100 units, with some variations depending on the sizes of
+%% functions in the source code.
+%%
+%% Typically, about 90% of the maximum speedup achievable is already
+%% reached using a size limit of 30, and 98% is reached at limits around
+%% 100-150; there is rarely any point in letting the code size increase
+%% by more than 10-15%. If too large functions are inlined, cache
+%% effects will slow the program down.
+
+default_effort() -> 150.
+default_size() -> 24.
+
+%% Base costs/weights for different kinds of expressions. If these are
+%% modified, the size limits above may have to be adjusted.
+
+weight(var) -> 0; % We count no cost for variable accesses.
+weight(values) -> 0; % Value aggregates have no cost in themselves.
+weight(literal) -> 1; % We assume efficient handling of constants.
+weight(data) -> 1; % Base cost; add 1 per element.
+weight(element) -> 1; % Cost of storing/fetching an element.
+weight(argument) -> 1; % Cost of passing a function argument.
+weight('fun') -> 6; % Base cost + average number of free vars.
+weight('let') -> 0; % Count no cost for let-bindings.
+weight(letrec) -> 0; % Like a let-binding.
+weight('case') -> 0; % Case switches have no base cost.
+weight(clause) -> 1; % Count one jump at the end of each clause body.
+weight('receive') -> 9; % Initialization/cleanup cost.
+weight('try') -> 1; % Assume efficient implementation.
+weight('catch') -> 1; % See `try'.
+weight(apply) -> 3; % Average base cost: call/return.
+weight(call) -> 3; % Assume remote-calls as efficient as `apply'.
+weight(primop) -> 2; % Assume more efficient than `apply'.
+weight(binary) -> 4; % Initialisation base cost.
+weight(bitstr) -> 3; % Coding/decoding a value; like a primop.
+weight(module) -> 1. % Like a letrec with a constant body
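+
+%% For example (a sketch of how these weights compose): residualising a
+%% remote call `m:f(A, B)' costs weight(call) + 2 * weight(argument)
+%% = 3 + 2 = 5 size units (cf. `i_call_2'), and an `apply' of arity 2
+%% likewise costs weight(apply) + 2 * weight(argument) = 5
+%% (cf. `apply_size').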
+
+%% These "reference" structures are used for variables and function
+%% variables. They keep track of the variable name, any bound operand,
+%% and the associated store location.
+
+-record(ref, {name, opnd, loc}).
+
+%% Operand structures contain the operand expression, the renaming and
+%% environment, the state location, and the effort counter at the call
+%% site (cf. `visit').
+
+-record(opnd, {expr, ren, env, loc, effort}).
+
+%% Since expressions are only visited in `effect' context when they are
+%% not bound to a referenced variable, only expressions visited in
+%% 'value' context are cached.
+
+-record(cache, {expr, size}).
+
+%% The context flags for an application structure are kept separate from
+%% the structure itself. Note that the original algorithm had exactly
+%% one operand in each application context structure, while we can have
+%% several, or none.
+
+-record(app, {opnds, ctxt, loc}).
+
+
+%%
+%% Interface functions
+%%
+
+%% Use compile option `{core_transform, inline}' to insert this as a
+%% compilation pass.
+
+core_transform(Code, Opts) ->
+ cerl:to_records(transform(cerl:from_records(Code), Opts)).
+
+transform(Tree) ->
+ transform(Tree, []).
+
+transform(Tree, Opts) ->
+ main(Tree, value, Opts).
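+
+%% A rough usage sketch (the file name below is only an example):
+%% compile a module to Core Erlang in memory and run the inliner on
+%% the result. Note that `transform/1,2' expect a `cerl' syntax tree,
+%% whereas `core_transform/2' first converts from the compiler's
+%% record representation.
+%%%
+%%%   {ok, _Mod, Core} = compile:file("foo.erl", [to_core, binary]),
+%%%   Core1 = cerl_inline:core_transform(Core, [verbose,
+%%%                                             {inline_size, 30}]).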
+
+main(Tree, Ctxt, Opts) ->
+ %% We spawn a new process to do the work, so we don't have to worry
+ %% about cluttering the process dictionary with debugging info, or
+ %% proper deallocation of ets-tables.
+ Opts1 = Opts ++ [{inline_size, default_size()},
+ {inline_effort, default_effort()}],
+ Reply = self(),
+ Pid = spawn_link(fun () -> start(Reply, Tree, Ctxt, Opts1) end),
+ receive
+ {Pid1, Tree1} when Pid1 == Pid ->
+ Tree1
+ end.
+
+start(Reply, Tree, Ctxt, Opts) ->
+ init_debug(),
+ case debug_runtime() of
+ true ->
+ put(inline_start_time,
+ element(1, erlang:statistics(runtime)));
+ _ ->
+ ok
+ end,
+ Size = max(1, proplists:get_value(inline_size, Opts)),
+ Effort = max(1, proplists:get_value(inline_effort, Opts)),
+ case proplists:get_bool(verbose, Opts) of
+ true ->
+ io:fwrite("Inlining: inline_size=~w inline_effort=~w\n",
+ [Size, Effort]);
+ false ->
+ ok
+ end,
+
+ %% Note that the counters of the new state are passive.
+ S = st__new(Effort, Size),
+
+%%% Initialization is not needed at present. Note that the code in
+%%% `inline_init' is not up-to-date with this module.
+%%% {Tree1, S1} = inline_init:init(Tree, S),
+%%% {Tree2, _S2} = i(Tree1, Ctxt, S1),
+ {Tree2, _S2} = i(Tree, Ctxt, S),
+ report_debug(),
+ Reply ! {self(), Tree2}.
+
+init_debug() ->
+ case debug_counters() of
+ true ->
+ put(counter_effort_triggers, 0),
+ put(counter_effort_max, 0),
+ put(counter_size_triggers, 0),
+ put(counter_size_max, 0);
+ _ ->
+ ok
+ end.
+
+report_debug() ->
+ case debug_runtime() of
+ true ->
+ {Time, _} = erlang:statistics(runtime),
+ report("Total run time for inlining: ~.2.0f s.\n",
+ [(Time - get(inline_start_time))/1000]);
+ _ ->
+ ok
+ end,
+ case debug_counters() of
+ true ->
+ counter_stats();
+ _ ->
+ ok
+ end.
+
+counter_stats() ->
+ T1 = get(counter_effort_triggers),
+ T2 = get(counter_size_triggers),
+ E = get(counter_effort_max),
+ S = get(counter_size_max),
+ M1 = io_lib:fwrite("\tNumber of triggered "
+ "effort counters: ~p.\n", [T1]),
+ M2 = io_lib:fwrite("\tNumber of triggered "
+ "size counters: ~p.\n", [T2]),
+ M3 = io_lib:fwrite("\tLargest active effort counter: ~p.\n",
+ [E]),
+ M4 = io_lib:fwrite("\tLargest active size counter: ~p.\n",
+ [S]),
+ report("Counter statistics:\n~s", [[M1, M2, M3, M4]]).
+
+
+%% =====================================================================
+%% The main inlining function
+%%
+%% i(E :: coreErlang(),
+%% Ctxt :: value | effect | #app{}
+%% Ren :: renaming(),
+%% Env :: environment(),
+%% S :: state())
+%% -> {E', S'}
+%%
+%% Note: It is expected that the input source code ('E') does not
+%% contain free variables. If it does, there is a risk of accidental
+%% name capture, in case a generated "new" variable name happens to be
+%% the same as the name of a variable that is free further below in the
+%% tree; the algorithm only consults the current environment to check if
+%% a name already exists.
+%%
+%% The renaming maps names of source-code variables and function
+%% variables to new names as necessary to avoid clashes, according to
+%% the "no-shadowing" strategy. The environment maps *residual-code*
+%% variables and function variables to operands and global information.
+%% Separating the renaming from the environment, and using the
+%% residual-code variables instead of the source-code variables as its
+%% domain, improves the behaviour of the algorithm when code needs to be
+%% traversed more than once.
+%%
+%% Note that there is no such thing as a `test' context for expressions
+%% in (Core) Erlang (see `i_case' below for details).
+
+i(E, Ctxt, S) ->
+ i(E, Ctxt, ren__identity(), env__empty(), S).
+
+i(E, Ctxt, Ren, Env, S0) ->
+ %% Count one unit of effort on each pass.
+ S = count_effort(1, S0),
+ case is_data(E) of
+ true ->
+ i_data(E, Ctxt, Ren, Env, S);
+ false ->
+ case type(E) of
+ var ->
+ i_var(E, Ctxt, Ren, Env, S);
+ values ->
+ i_values(E, Ctxt, Ren, Env, S);
+ 'fun' ->
+ i_fun(E, Ctxt, Ren, Env, S);
+ seq ->
+ i_seq(E, Ctxt, Ren, Env, S);
+ 'let' ->
+ i_let(E, Ctxt, Ren, Env, S);
+ letrec ->
+ i_letrec(E, Ctxt, Ren, Env, S);
+ 'case' ->
+ i_case(E, Ctxt, Ren, Env, S);
+ 'receive' ->
+ i_receive(E, Ctxt, Ren, Env, S);
+ apply ->
+ i_apply(E, Ctxt, Ren, Env, S);
+ call ->
+ i_call(E, Ctxt, Ren, Env, S);
+ primop ->
+ i_primop(E, Ren, Env, S);
+ 'try' ->
+ i_try(E, Ctxt, Ren, Env, S);
+ 'catch' ->
+ i_catch(E, Ctxt, Ren, Env, S);
+ binary ->
+ i_binary(E, Ren, Env, S);
+ module ->
+ i_module(E, Ctxt, Ren, Env, S)
+ end
+ end.
+
+i_data(E, Ctxt, Ren, Env, S) ->
+ case is_literal(E) of
+ true ->
+ %% This is the `(const c)' case of the original algorithm:
+ %% literal terms which (regardless of size) do not need to
+ %% be constructed dynamically at runtime - boldly assuming
+ %% that the compiler/runtime system can handle this.
+ case Ctxt of
+ effect ->
+ %% Reduce useless constants to a simple value.
+ {void(), count_size(weight(literal), S)};
+ _ ->
+ %% (In Erlang, we cannot set all non-`false'
+ %% constants to `true' in a `test' context, like we
+ %% could do in Lisp or C, so the above is the only
+ %% special case to be handled here.)
+ {E, count_size(weight(literal), S)}
+ end;
+ false ->
+            %% Data constructors are like calls to safe built-in
+ %% functions, for which we can "decide to inline"
+ %% immediately; there is no need to create operand
+ %% structures. In `effect' context, we can simply make a
+ %% sequence of the argument expressions, also visited in
+ %% `effect' context. In all other cases, the arguments are
+ %% visited for value.
+ case Ctxt of
+ effect ->
+ %% Note that this will count the sizes of the
+ %% subexpressions, even though some or all of them
+ %% might be discarded by the sequencing afterwards.
+ {Es1, S1} = mapfoldl(fun (E, S) ->
+ i(E, effect, Ren, Env,
+ S)
+ end,
+ S, data_es(E)),
+ E1 = foldl(fun (E1, E2) -> make_seq(E1, E2) end,
+ void(), Es1),
+ {E1, S1};
+ _ ->
+ {Es1, S1} = mapfoldl(fun (E, S) ->
+ i(E, value, Ren, Env,
+ S)
+ end,
+ S, data_es(E)),
+ %% The total size/cost is the base cost for a data
+ %% constructor plus the cost for storing each
+ %% element.
+ N = weight(data) + length(Es1) * weight(element),
+ S2 = count_size(N, S1),
+ {update_data(E, data_type(E), Es1), S2}
+ end
+ end.
+
+%% This is the `(ref x)' (variable use) case of the original algorithm.
+%% Note that binding occurrences are always handled in the respective
+%% cases of the binding constructs.
+
+i_var(E, Ctxt, Ren, Env, S) ->
+ case Ctxt of
+ effect ->
+ %% Reduce useless variable references to a simple constant.
+ %% This also avoids useless visiting of bound operands.
+ {void(), count_size(weight(literal), S)};
+ _ ->
+ Name = var_name(E),
+ case env__lookup(ren__map(Name, Ren), Env) of
+ {ok, R} ->
+ case R#ref.opnd of
+ undefined ->
+ %% The variable is not associated with an
+ %% argument expression; just residualize it.
+ residualize_var(R, S);
+ Opnd ->
+ i_var_1(R, Opnd, Ctxt, Env, S)
+ end;
+ error ->
+ %% The variable is unbound. (It has not been
+ %% accidentally captured, however, or it would have
+ %% been in the environment.) We leave it as it is,
+ %% without any warning.
+ {E, count_size(weight(var), S)}
+ end
+ end.
+
+%% This first visits the bound operand and then does copy propagation.
+%% Note that we must first set the "inner-pending" flag, and clear the
+%% flag afterwards.
+
+i_var_1(R, Opnd, Ctxt, Env, S) ->
+ %% If the operand is already "inner-pending", it is residualised.
+ %% (In Lisp/C, if the variable might be assigned to, it should also
+ %% be residualised.)
+ L = Opnd#opnd.loc,
+ case st__test_inner_pending(L, S) of
+ true ->
+ residualize_var(R, S);
+ false ->
+ S1 = st__mark_inner_pending(L, S),
+ case catch {ok, visit(Opnd, S1)} of
+ {ok, {E, S2}} ->
+ %% Note that we pass the current environment and
+ %% context to `copy', but not the current renaming.
+ S3 = st__clear_inner_pending(L, S2),
+ copy(R, Opnd, E, Ctxt, Env, S3);
+ {'EXIT', X} ->
+ exit(X);
+ X ->
+ %% If we use destructive update for the
+ %% `inner-pending' flag, we must make sure to clear
+ %% it also if we make a nonlocal return.
+ st__clear_inner_pending(Opnd#opnd.loc, S1),
+ throw(X)
+ end
+ end.
+
+%% A multiple-value aggregate `<e1, ..., en>'. This is very much like a
+%% tuple data constructor `{e1, ..., en}'; cf. `i_data' for details.
+
+i_values(E, Ctxt, Ren, Env, S) ->
+ case values_es(E) of
+ [E1] ->
+ %% Single-value aggregates can be dropped; they are simply
+ %% notation.
+ i(E1, Ctxt, Ren, Env, S);
+ Es ->
+ %% In `effect' context, we can simply make a sequence of the
+ %% argument expressions, also visited in `effect' context.
+ %% In all other cases, the arguments are visited for value.
+ case Ctxt of
+ effect ->
+ {Es1, S1} =
+ mapfoldl(fun (E, S) ->
+ i(E, effect, Ren, Env, S)
+ end,
+ S, Es),
+ E1 = foldl(fun (E1, E2) ->
+ make_seq(E1, E2)
+ end,
+ void(), Es1),
+ {E1, S1}; % drop annotations on E
+ _ ->
+ {Es1, S1} = mapfoldl(fun (E, S) ->
+ i(E, value, Ren, Env,
+ S)
+ end,
+ S, Es),
+ %% Aggregating values does not write them to memory,
+ %% so we count no extra cost per element.
+ S2 = count_size(weight(values), S1),
+ {update_c_values(E, Es1), S2}
+ end
+ end.
+
+%% A let-expression `let <v1,...,vn> = e0 in e1' is semantically
+%% equivalent to a case-expression `case e0 of <v1,...,vn> when 'true'
+%% -> e1 end'. As a special case, `let <v> = e0 in e1' is also
+%% equivalent to `apply fun (v) -> e1 (e0)'. However, for efficiency,
+%% and in order to allow the handling of `case' clauses to introduce new
+%% let-expressions without entering an infinite rewrite loop, we handle
+%% these directly.
+
+%%% %% Rewriting a `let' to an equivalent expression.
+%%% i_let(E, Ctxt, Ren, Env, S) ->
+%%% case let_vars(E) of
+%%% [V] ->
+%%% E1 = update_c_apply(E, c_fun([V], let_body(E)), [let_arg(E)]),
+%%% i(E1, Ctxt, Ren, Env, S);
+%%% Vs ->
+%%% C = c_clause(Vs, abstract(true), let_body(E)),
+%%% E1 = update_c_case(E, let_arg(E), [C]),
+%%% i(E1, Ctxt, Ren, Env, S)
+%%% end.
+
+i_let(E, Ctxt, Ren, Env, S) ->
+ case let_vars(E) of
+ [V] ->
+ i_let_1(V, E, Ctxt, Ren, Env, S);
+ Vs ->
+ %% Visit the argument expression in `value' context, to
+ %% simplify it as far as possible.
+ {A, S1} = i(let_arg(E), value, Ren, Env, S),
+ case get_components(length(Vs), result(A)) of
+ {true, As} ->
+ %% Note that only the components of the result of
+ %% `A' are passed on; any effects are hoisted.
+ {E1, S2} = i_let_2(Vs, As, E, Ctxt, Ren, Env, S1),
+ {hoist_effects(A, E1), S2};
+ false ->
+ %% We cannot do anything with this `let', since the
+ %% variables cannot be matched against the argument
+ %% components. Just visit the variables for renaming
+ %% and visit the body for value (cf. `i_fun').
+ {_, Ren1, Env1, S2} = bind_locals(Vs, Ren, Env, S1),
+ Vs1 = i_params(Vs, Ren1, Env1),
+ %% The body is always visited for value here.
+ {B, S3} = i(let_body(E), value, Ren1, Env1, S2),
+ S4 = count_size(weight('let'), S3),
+ {update_c_let(E, Vs1, A, B), S4}
+ end
+ end.
+
+%% Single-variable `let' binding.
+
+i_let_1(V, E, Ctxt, Ren, Env, S) ->
+ %% Make an operand structure for the argument expression, create a
+ %% local binding from the parameter to the operand structure, and
+ %% visit the body. Finally create necessary bindings and/or set
+ %% flags.
+ {Opnd, S1} = make_opnd(let_arg(E), Ren, Env, S),
+ {[R], Ren1, Env1, S2} = bind_locals([V], [Opnd], Ren, Env, S1),
+ {E1, S3} = i(let_body(E), Ctxt, Ren1, Env1, S2),
+ i_let_3([R], [Opnd], E1, S3).
+
+%% Multi-variable `let' binding.
+
+i_let_2(Vs, As, E, Ctxt, Ren, Env, S) ->
+ %% Make operand structures for the argument components. Note that
+ %% since the argument has already been visited at this point, we use
+ %% the identity renaming for the operands.
+ {Opnds, S1} = mapfoldl(fun (E, S) ->
+ make_opnd(E, ren__identity(), Env, S)
+ end,
+ S, As),
+ %% Create local bindings from the parameters to their respective
+ %% operand structures, and visit the body.
+ {Rs, Ren1, Env1, S2} = bind_locals(Vs, Opnds, Ren, Env, S1),
+ {E1, S3} = i(let_body(E), Ctxt, Ren1, Env1, S2),
+ i_let_3(Rs, Opnds, E1, S3).
+
+i_let_3(Rs, Opnds, E, S) ->
+ %% Create necessary bindings and/or set flags.
+ {E1, S1} = make_let_bindings(Rs, E, S),
+
+ %% We must also create evaluation for effect, for any unused
+ %% operands, as after an application expression.
+ residualize_operands(Opnds, E1, S1).
+
+%% A sequence `do e1 e2', written `(seq e1 e2)' in the original
+%% algorithm, where `e1' is evaluated for effect only (since its value
+%% is not used), and `e2' yields the final value. Note that we use
+%% `make_seq' to recompose the sequence after visiting the parts.
+
+i_seq(E, Ctxt, Ren, Env, S) ->
+ {E1, S1} = i(seq_arg(E), effect, Ren, Env, S),
+ {E2, S2} = i(seq_body(E), Ctxt, Ren, Env, S1),
+ %% A sequence has no cost in itself.
+ {make_seq(E1, E2), S2}.
+
+
+%% The `case' switch of Core Erlang is rather different from the boolean
+%% `(if e1 e2 e3)' case of the original algorithm, but the central idea
+%% is the same: if, given the simplified switch expression (which is
+%% visited in `value' context - a boolean `test' context would not be
+%% generally useful), there is a clause which could definitely be
+%% selected, such that no clause before it can possibly be selected,
+%% then we can eliminate all other clauses. (And even if this is not the
+%% case, some clauses can often be eliminated.) Furthermore, if a clause
+%% can be selected, we can replace the case-expression (including the
+%% switch expression) with the body of the clause and a set of zero or
+%% more let-bindings of subexpressions of the switch expression. (In the
+%% simplest case, the switch expression is evaluated only for effect.)
+
+i_case(E, Ctxt, Ren, Env, S) ->
+ %% First visit the switch expression in `value' context, to simplify
+ %% it as far as possible. Note that only the result part is passed
+ %% on to the clause matching below; any effects are hoisted.
+ {A, S1} = i(case_arg(E), value, Ren, Env, S),
+ A1 = result(A),
+
+ %% Propagating an application context into the branches could cause
+ %% the arguments of the application to be evaluated *after* the
+ %% switch expression, but *before* the body of the selected clause.
+ %% Such interleaving is not allowed in general, and it does not seem
+    %% worthwhile to make a more powerful transformation here. Therefore,
+ %% the clause bodies are conservatively visited for value if the
+ %% context is `application'.
+ Ctxt1 = safe_context(Ctxt),
+ {E1, S2} = case get_components(case_arity(E), A1) of
+ {true, As} ->
+ i_case_1(As, E, Ctxt1, Ren, Env, S1);
+ false ->
+ i_case_1([], E, Ctxt1, Ren, Env, S1)
+ end,
+ {hoist_effects(A, E1), S2}.
+
+i_case_1(As, E, Ctxt, Ren, Env, S) ->
+ case i_clauses(As, case_clauses(E), Ctxt, Ren, Env, S) of
+ {false, {As1, Vs, Env1, Cs}, S1} ->
+ %% We still have a list of clauses. Sanity check:
+ if Cs == [] ->
+ report_warning("empty list of clauses "
+ "in residual program!.\n");
+ true ->
+ ok
+ end,
+ {A, S2} = i(c_values(As1), value, ren__identity(), Env1,
+ S1),
+ {E1, S3} = i_case_2(Cs, A, E, S2),
+ i_case_3(Vs, Env1, E1, S3);
+ {true, {_, Vs, Env1, [C]}, S1} ->
+ %% A single clause was selected; we just take the body.
+ i_case_3(Vs, Env1, clause_body(C), S1)
+ end.
+
+%% Check if all clause bodies are actually equivalent expressions that
+%% do not depend on pattern variables (this sometimes occurs as a
+%% consequence of inlining, e.g., all branches might yield 'true'), and
+%% if so, replace the `case' with a sequence, first evaluating the
+%% clause selection for effect, then evaluating one of the clause bodies
+%% for its value. (Unless the switch contains a catch-all clause, the
+%% clause selection must be evaluated for effect, since there is no
+%% guarantee that any of the clauses will actually match. Assuming that
+%% some clause always matches could make an undefined program produce a
+%% value.) This makes the final size less than what was accounted for
+%% when visiting the clauses, but currently we don't try to adjust for
+%% this.
+
+i_case_2(Cs, A, E, S) ->
+ case equivalent_clauses(Cs) of
+ false ->
+ %% Count the base sizes for the remaining clauses; pattern
+ %% and guard sizes are already counted.
+ N = weight('case') + weight(clause) * length(Cs),
+ S1 = count_size(N, S),
+ {update_c_case(E, A, Cs), S1};
+ true ->
+ case cerl_clauses:any_catchall(Cs) of
+ true ->
+ %% We know that some clause must be selected, so we
+ %% can drop all the testing as well.
+ E1 = make_seq(A, clause_body(hd(Cs))),
+ {E1, S};
+ false ->
+ %% The clause selection must be performed for
+ %% effect.
+ E1 = update_c_case(E, A,
+ set_clause_bodies(Cs, void())),
+ {make_seq(E1, clause_body(hd(Cs))), S}
+ end
+ end.
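+
+%% For instance (sketch): if inlining has turned every clause body of a
+%% `case' into the literal 'true' and the last clause is a catch-all,
+%% the clause selection is known to succeed, so the whole switch can be
+%% replaced by evaluating the switch argument for effect followed by
+%% 'true' (i.e., `make_seq(A, 'true')' above).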
+
+i_case_3(Vs, Env, E, S) ->
+ %% For the variables bound to the switch expression subexpressions,
+ %% make let bindings or create evaluation for effect.
+ Rs = [env__get(var_name(V), Env) || V <- Vs],
+ {E1, S1} = make_let_bindings(Rs, E, S),
+ Opnds = [R#ref.opnd || R <- Rs],
+ residualize_operands(Opnds, E1, S1).
+
+%% This function takes a sequence of switch expressions `Es' (which can
+%% be the empty list if these are unknown) and a list `Cs' of clauses,
+%% and returns `{Match, {As, Vs, Env1, Cs1}, S1}' where `As' is a list
+%% of residual switch expressions, `Vs' the list of variables used in
+%% the templates, `Env1' the environment for the templates, and `Cs1'
+%% the list of residual clauses. `Match' is `true' if some clause could
+%% be shown to definitely match (in this case, `Cs1' contains exactly
+%% one element), and `false' otherwise. `S1' is the new state. The given
+%% `Ctxt' is the context to be used for visiting the body of clauses.
+%%
+%% Visiting a clause basically amounts to extending the environment for
+%% all variables in the pattern, as for a `fun' (cf. `i_fun'),
+%% propagating match information if possible, and visiting the guard and
+%% body in the new environment.
+%%
+%% To make it cheaper to handle a set of clauses, and to avoid
+%% unnecessarily exceeding the size limit, we avoid visiting the bodies
+%% of clauses which are subsequently removed, by dividing the visiting
+%% of a clause into two stages: first construct the environment(s) and
+%% visit the pattern (for renaming) and the guard (for value), then
+%% reduce the switch as much as possible, and lastly visit the body.
+
+i_clauses(Cs, Ctxt, Ren, Env, S) ->
+ i_clauses([], Cs, Ctxt, Ren, Env, S).
+
+i_clauses(Es, Cs, Ctxt, Ren, Env, S) ->
+ %% Create templates for the switch expressions.
+ {Ts, {Vs, Env0}} = mapfoldl(fun (E, {Vs, Env}) ->
+ {T, Vs1, Env1} =
+ make_template(E, Env),
+ {T, {Vs1 ++ Vs, Env1}}
+ end,
+ {[], Env}, Es),
+
+ %% Make operand structures for the switch subexpression templates
+ %% (found in `Env0') and add proper ref-structure bindings to the
+ %% environment. Since the subexpressions in general can be
+ %% interdependent (Vs is in reverse-dependency order), the
+ %% environment (and renaming) must be created incrementally. Note
+ %% that since the switch expressions have been visited already, the
+ %% identity renaming is used for the operands.
+ Vs1 = lists:reverse(Vs),
+ {Ren1, Env1, S1} =
+ foldl(fun (V, {Ren, Env, S}) ->
+ E = env__get(var_name(V), Env0),
+ {Opnd, S_1} = make_opnd(E, ren__identity(), Env,
+ S),
+ {_, Ren1, Env1, S_2} = bind_locals([V], [Opnd],
+ Ren, Env, S_1),
+ {Ren1, Env1, S_2}
+ end,
+ {Ren, Env, S}, Vs1),
+
+ %% First we visit the head of each individual clause, renaming
+ %% pattern variables, inserting let-bindings in the guard and body,
+ %% and visiting the guard. The information used for visiting the
+ %% clause body will be prefixed to the clause annotations.
+ {Cs1, S2} = mapfoldl(fun (C, S) ->
+ i_clause_head(C, Ts, Ren1, Env1, S)
+ end,
+ S1, Cs),
+
+ %% Now that the clause guards have been reduced as far as possible,
+ %% we can attempt to reduce the clauses.
+ As = [hd(get_ann(T)) || T <- Ts],
+ case cerl_clauses:reduce(Cs1, Ts) of
+ {false, Cs2} ->
+ %% We still have one or more clauses (with associated
+ %% extended environments). Their bodies have not yet been
+ %% visited, so we do that (in the respective safe
+ %% environments, adding the sizes of the visited heads to
+ %% the current size counter) and return the final list of
+ %% clauses.
+ {Cs3, S3} = mapfoldl(
+ fun (C, S) ->
+ i_clause_body(C, Ctxt, S)
+ end,
+ S2, Cs2),
+ {false, {As, Vs1, Env1, Cs3}, S3};
+ {true, {C, _}} ->
+ %% A clause C could be selected (the bindings have already
+ %% been added to the guard/body). Note that since the clause
+ %% head will probably be discarded, its size is not counted.
+ {C1, Ren2, Env2, _} = get_clause_extras(C),
+ {B, S3} = i(clause_body(C), Ctxt, Ren2, Env2, S2),
+ C2 = update_c_clause(C1, clause_pats(C1), clause_guard(C1), B),
+ {true, {As, Vs1, Env1, [C2]}, S3}
+ end.
+
+%% This visits the head of a clause, renames pattern variables, inserts
+%% let-bindings in the guard and body, and does inlining on the guard
+%% expression. Returns the new clause, with `{Renaming, Environment,
+%% Size}' prefixed to its annotations for later use when visiting the
+%% body of the clause (cf. `i_clause_body').
+
+i_clause_head(C, Ts, Ren, Env, S) ->
+ %% Match the templates against the (non-renamed) patterns to get the
+ %% available information about matching subexpressions. We don't
+ %% care at this point whether an exact match/nomatch is detected.
+ Ps = clause_pats(C),
+ Bs = case cerl_clauses:match_list(Ps, Ts) of
+ {_, Bs1} -> Bs1;
+ none -> []
+ end,
+
+ %% The patterns must be visited for renaming; cf. `i_pattern'. We
+ %% use a passive size counter for visiting the patterns and the
+ %% guard (cf. `visit'), because we do not know at this stage whether
+ %% the clause will be kept or not; the final value of the counter is
+ %% included in the returned value below.
+ {_, Ren1, Env1, S1} = bind_locals(clause_vars(C), Ren, Env, S),
+ S2 = new_passive_size(get_size_limit(S1), S1),
+ {Ps1, S3} = mapfoldl(fun (P, S) ->
+ i_pattern(P, Ren1, Env1, Ren, Env, S)
+ end,
+ S2, Ps),
+
+ %% Rewrite guard and body and visit the guard for value. Discard the
+ %% latter size count if the guard turns out to be a constant.
+ G = add_match_bindings(Bs, clause_guard(C)),
+ B = add_match_bindings(Bs, clause_body(C)),
+ {G1, S4} = i(G, value, Ren1, Env1, S3),
+ S5 = case is_literal(G1) of
+ true ->
+ revert_size(S3, S4);
+ false ->
+ S4
+ end,
+
+ %% Revert to the size counter we had on entry to this function. The
+ %% environment and renaming, together with the size of the clause
+ %% head, are prefixed to the annotations for later use.
+ Size = get_size_value(S5),
+ C1 = update_c_clause(C, Ps1, G1, B),
+ {set_clause_extras(C1, Ren1, Env1, Size), revert_size(S, S5)}.
+
+add_match_bindings(Bs, E) ->
+ %% Don't waste time if the variables definitely cannot be used.
+ %% (Most guards are simply `true'.)
+ case is_literal(E) of
+ true ->
+ E;
+ false ->
+ Vs = [V || {V, E} <- Bs, E /= any],
+ Es = [hd(get_ann(E)) || {_V, E} <- Bs, E /= any],
+ c_let(Vs, c_values(Es), E)
+ end.
+
+i_clause_body(C0, Ctxt, S) ->
+ {C, Ren, Env, Size} = get_clause_extras(C0),
+ S1 = count_size(Size, S),
+ {B, S2} = i(clause_body(C), Ctxt, Ren, Env, S1),
+ C1 = update_c_clause(C, clause_pats(C), clause_guard(C), B),
+ {C1, S2}.
+
+get_clause_extras(C) ->
+ [{Ren, Env, Size} | As] = get_ann(C),
+ {set_ann(C, As), Ren, Env, Size}.
+
+set_clause_extras(C, Ren, Env, Size) ->
+ As = [{Ren, Env, Size} | get_ann(C)],
+ set_ann(C, As).
+
+%% This is the `(lambda x e)' case of the original algorithm. A
+%% `fun' is like a lambda expression, but with a varying number of
+%% parameters; possibly zero.
+
+i_fun(E, Ctxt, Ren, Env, S) ->
+ case Ctxt of
+ effect ->
+ %% Reduce useless `fun' expressions to a simple constant;
+ %% visiting the body would be a waste of time, and could
+ %% needlessly mark variables as referenced.
+ {void(), count_size(weight(literal), S)};
+ value ->
+ %% Note that the variables are visited as patterns.
+ Vs = fun_vars(E),
+ {_, Ren1, Env1, S1} = bind_locals(Vs, Ren, Env, S),
+ Vs1 = i_params(Vs, Ren1, Env1),
+
+ %% The body is always visited for value.
+ {B, S2} = i(fun_body(E), value, Ren1, Env1, S1),
+
+ %% We don't bother to include the exact number of free
+ %% variables in the cost for creating a fun-value.
+ S3 = count_size(weight('fun'), S2),
+
+ %% Inlining might have duplicated code, so we must remove
+ %% any 'id'-annotations from the original fun-expression.
+ %% (This forces a later stage to invent new id:s.) This is
+ %% necessary as long as fun:s may still need to be
+ %% identified the old way. Function variables that are not
+ %% in application context also have such annotations, but
+ %% the inlining will currently lose all annotations on
+ %% variable references (I think), so that's not a problem.
+ {set_ann(c_fun(Vs1, B), kill_id_anns(get_ann(E))), S3};
+ #app{} ->
+ %% An application of a fun-expression (in the source code)
+ %% is handled by going directly to `inline'; this is never
+ %% residualised, and we don't set up new counters here. Note
+ %% that inlining of copy-propagated fun-expressions is done
+ %% in `copy'; not here.
+ inline(E, Ctxt, Ren, Env, S)
+ end.
+
+%% A `letrec' requires a circular environment, but is otherwise like a
+%% `let', i.e. like a direct lambda application. Note that only
+%% fun-expressions (lambda abstractions) may occur in the right-hand
+%% side of each definition.
+
+i_letrec(E, Ctxt, Ren, Env, S) ->
+ %% Note that we pass an empty list for the auto-referenced
+ %% (exported) functions here.
+ {Es, B, _, S1} = i_letrec(letrec_defs(E), letrec_body(E), [], Ctxt,
+ Ren, Env, S),
+
+ %% If no bindings remain, only the body is returned.
+ case Es of
+ [] ->
+ {B, S1}; % drop annotations on E
+ _ ->
+ S2 = count_size(weight(letrec), S1),
+ {update_c_letrec(E, Es, B), S2}
+ end.
+
+%% The major part of this is shared by letrec-expressions and module
+%% definitions alike.
+
+i_letrec(Es, B, Xs, Ctxt, Ren, Env, S) ->
+ %% First, we create operands with dummy renamings and environments,
+ %% and with fresh store locations for cached expressions and operand
+ %% info.
+ {Opnds, S1} = mapfoldl(fun ({_, E}, S) ->
+ make_opnd(E, undefined, undefined, S)
+ end,
+ S, Es),
+
+ %% Then we make recursive bindings for the definitions.
+ {Rs, Ren1, Env1, S2} = bind_recursive([F || {F, _} <- Es],
+ Opnds, Ren, Env, S1),
+
+ %% For the function variables listed in Xs (none for a
+ %% letrec-expression), we must make sure that the corresponding
+ %% operand expressions are visited and that the definitions are
+ %% marked as referenced; we also need to return the possibly renamed
+ %% function variables.
+ {Xs1, S3} =
+ mapfoldl(
+ fun (X, S) ->
+ Name = ren__map(var_name(X), Ren1),
+ case env__lookup(Name, Env1) of
+ {ok, R} ->
+ S_1 = i_letrec_export(R, S),
+ {ref_to_var(R), S_1};
+ error ->
+ %% We just skip any exports that are not
+ %% actually defined here, and generate a
+ %% warning message.
+ {N, A} = var_name(X),
+ report_warning("export `~w'/~w "
+ "not defined.\n", [N, A]),
+ {X, S}
+ end
+ end,
+ S2, Xs),
+
+ %% At last, we can then visit the body.
+ {B1, S4} = i(B, Ctxt, Ren1, Env1, S3),
+
+ %% Finally, we create new letrec-bindings for any and all
+ %% residualised definitions. All referenced functions should have
+    %% been visited; the call to `visit' below is expected to retrieve a
+ %% cached expression.
+ Rs1 = keep_referenced(Rs, S4),
+ {Es1, S5} = mapfoldl(fun (R, S) ->
+ {E_1, S_1} = visit(R#ref.opnd, S),
+ {{ref_to_var(R), E_1}, S_1}
+ end,
+ S4, Rs1),
+ {Es1, B1, Xs1, S5}.
+
+%% This visits the operand for a function definition exported by a
+%% `letrec' (which is really a `module' definition, since normal
+%% letrecs have no export declarations). Only the updated state is
+%% returned. We must handle the "inner-pending" flag when doing this;
+%% cf. `i_var'.
+
+i_letrec_export(R, S) ->
+ Opnd = R#ref.opnd,
+ S1 = st__mark_inner_pending(Opnd#opnd.loc, S),
+ {_, S2} = visit(Opnd, S1),
+ {_, S3} = residualize_var(R, st__clear_inner_pending(Opnd#opnd.loc,
+ S2)),
+ S3.
+
+%% This is the `(call e1 e2)' case of the original algorithm. The only
+%% difference is that we must handle multiple (or no) operand
+%% expressions.
+
+i_apply(E, Ctxt, Ren, Env, S) ->
+ {Opnds, S1} = mapfoldl(fun (E, S) ->
+ make_opnd(E, Ren, Env, S)
+ end,
+ S, apply_args(E)),
+
+ %% Allocate a new app-context location and set up an application
+ %% context structure containing the surrounding context.
+ {L, S2} = st__new_app_loc(S1),
+ Ctxt1 = #app{opnds = Opnds, ctxt = Ctxt, loc = L},
+
+ %% Visit the operator expression in the new call context.
+ {E1, S3} = i(apply_op(E), Ctxt1, Ren, Env, S2),
+
+ %% Check the "inlined" flag to find out what to do next. (The store
+ %% location could be recycled after the flag has been tested, but
+ %% there is no real advantage to that, because in practice, only
+ %% 4-5% of all created store locations will ever be reused, while
+    %% there will be a noticeable overhead for managing the free list.)
+ case st__get_app_inlined(L, S3) of
+ true ->
+ %% The application was inlined, so we have the final
+ %% expression in `E1'. We just have to handle any operands
+ %% that need to be residualized for effect only (i.e., those
+ %% the values of which are not used).
+ residualize_operands(Opnds, E1, S3);
+ false ->
+ %% Otherwise, `E1' is the residual operator expression. We
+ %% make sure all operands are visited, and rebuild the
+ %% application.
+ {Es, S4} = mapfoldl(fun (Opnd, S) ->
+ visit_and_count_size(Opnd, S)
+ end,
+ S3, Opnds),
+ N = apply_size(length(Es)),
+ {update_c_apply(E, E1, Es), count_size(N, S4)}
+ end.
+
+apply_size(A) ->
+ weight(apply) + weight(argument) * A.
+
+%% Since it is not the task of this transformation to handle
+%% cross-module inlining, all inter-module calls are handled by visiting
+%% the components (the module and function name, and the arguments of
+%% the call) for value. In `effect' context, if the function itself is
+%% known to be completely effect free, the call can be discarded and the
+%% arguments evaluated for effect. Otherwise, if all the visited
+%% arguments are constants, and the function is known to be safe to
+%% execute at compile time, then we try to evaluate the call. If
+%% evaluation completes normally, the call is replaced by the result;
+%% otherwise the call is residualised.
+
+i_call(E, Ctxt, Ren, Env, S) ->
+ {M, S1} = i(call_module(E), value, Ren, Env, S),
+ {F, S2} = i(call_name(E), value, Ren, Env, S1),
+ As = call_args(E),
+ Arity = length(As),
+
+ %% Check if the name of the called function is static. If so,
+ %% discard the size counts performed above, since the values will
+ %% not cause any runtime cost.
+ Static = is_c_atom(M) and is_c_atom(F),
+ S3 = case Static of
+ true ->
+ revert_size(S, S2);
+ false ->
+ S2
+ end,
+ case Ctxt of
+ effect when Static == true ->
+ case is_safe_call(atom_val(M), atom_val(F), Arity) of
+ true ->
+ %% The result will not be used, and the call is
+ %% effect free, so we create a multiple-value
+ %% aggregate containing the (not yet visited)
+ %% arguments and process that instead.
+ i(c_values(As), effect, Ren, Env, S3);
+ false ->
+ %% We are not allowed to simply discard the call,
+ %% but we can try to evaluate it.
+ i_call_1(Static, M, F, Arity, As, E, Ctxt, Ren, Env,
+ S3)
+ end;
+ _ ->
+ i_call_1(Static, M, F, Arity, As, E, Ctxt, Ren, Env, S3)
+ end.
+
+i_call_1(Static, M, F, Arity, As, E, Ctxt, Ren, Env, S) ->
+ %% Visit the arguments for value.
+ {As1, S1} = mapfoldl(fun (X, A) -> i(X, value, Ren, Env, A) end,
+ S, As),
+ case Static of
+ true ->
+ case erl_bifs:is_pure(atom_val(M), atom_val(F), Arity) of
+ true ->
+ %% It is allowed to evaluate this at compile time.
+ case all_static(As1) of
+ true ->
+ i_call_3(M, F, As1, E, Ctxt, Env, S1);
+ false ->
+ %% See if the call can be rewritten instead.
+ i_call_4(M, F, As1, E, Ctxt, Env, S1)
+ end;
+ false ->
+ i_call_2(M, F, As1, E, S1)
+ end;
+ false ->
+ i_call_2(M, F, As1, E, S1)
+ end.
+
+%% Residualise the call.
+
+i_call_2(M, F, As, E, S) ->
+ N = weight(call) + weight(argument) * length(As),
+ {update_c_call(E, M, F, As), count_size(N, S)}.
+
+%% Attempt to evaluate the call to yield a literal; if that fails, try
+%% to rewrite the expression.
+
+i_call_3(M, F, As, E, Ctxt, Env, S) ->
+    %% Note that we extract the results of argument expressions here; the
+ %% expressions could still be sequences with side effects.
+ Vs = [concrete(result(A)) || A <- As],
+ case catch {ok, apply(atom_val(M), atom_val(F), Vs)} of
+ {ok, V} ->
+ %% Evaluation completed normally - try to turn the result
+ %% back into a syntax tree (representing a literal).
+ case is_literal_term(V) of
+ true ->
+ %% Make a sequence of the arguments (as a
+ %% multiple-value aggregate) and the final value.
+ S1 = count_size(weight(values), S),
+ S2 = count_size(weight(literal), S1),
+ {make_seq(c_values(As), abstract(V)), S2};
+ false ->
+ %% The result could not be represented as a literal.
+ i_call_4(M, F, As, E, Ctxt, Env, S)
+ end;
+ _ ->
+ %% The evaluation attempt did not complete normally.
+ i_call_4(M, F, As, E, Ctxt, Env, S)
+ end.
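+
+%% For instance (sketch): a static call such as `erlang:length([a,b,c])'
+%% whose arguments are all constants, and whose target is reported as
+%% pure by `erl_bifs:is_pure/3', is evaluated here at compile time and
+%% replaced by the literal 3, sequenced after the argument aggregate
+%% (which `make_seq' is expected to drop when it is effect-free).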
+
+%% Rewrite the expression, if possible, otherwise residualise it.
+
+i_call_4(M, F, As, E, Ctxt, Env, S) ->
+ case reduce_bif_call(atom_val(M), atom_val(F), As, Env) of
+ false ->
+ %% Nothing more to be done - residualise the call.
+ i_call_2(M, F, As, E, S);
+ {true, E1} ->
+ %% We revisit the result, because the rewriting might have
+ %% opened possibilities for further inlining. Since the
+ %% parts have already been visited once, we use the identity
+ %% renaming here.
+ i(E1, Ctxt, ren__identity(), Env, S)
+ end.
+
+%% For now, we assume that primops cannot be evaluated at compile time,
+%% probably being too special. Also, we have no knowledge about their
+%% side effects.
+
+i_primop(E, Ren, Env, S) ->
+ %% Visit the arguments for value.
+ {As, S1} = mapfoldl(fun (E, S) ->
+ i(E, value, Ren, Env, S)
+ end,
+ S, primop_args(E)),
+ N = weight(primop) + weight(argument) * length(As),
+ {update_c_primop(E, primop_name(E), As), count_size(N, S1)}.
+
+%% This is like having an expression with an extra fun-expression
+%% attached for "exceptional cases"; actually, there are exactly two
+%% parameter variables for the body, but they are easiest handled as if
+%% their number might vary, just as for a `fun'.
+
+i_try(E, Ctxt, Ren, Env, S) ->
+ %% The argument expression is evaluated in `value' context, and the
+ %% surrounding context is propagated into both branches. We do not
+ %% try to recognize cases when the protected expression will
+ %% actually raise an exception. Note that the variables are visited
+ %% as patterns.
+ {A, S1} = i(try_arg(E), value, Ren, Env, S),
+ Vs = try_vars(E),
+ {_, Ren1, Env1, S2} = bind_locals(Vs, Ren, Env, S1),
+ Vs1 = i_params(Vs, Ren1, Env1),
+ {B, S3} = i(try_body(E), Ctxt, Ren1, Env1, S2),
+ case is_safe(A) of
+ true ->
+ %% The `try' wrapper can be dropped in this case. Since the
+ %% expressions have been visited already, the identity
+ %% renaming is used when we revisit the new let-expression.
+ i(c_let(Vs1, A, B), Ctxt, ren__identity(), Env, S3);
+ false ->
+ Evs = try_evars(E),
+ {_, Ren2, Env2, S4} = bind_locals(Evs, Ren, Env, S3),
+ Evs1 = i_params(Evs, Ren2, Env2),
+ {H, S5} = i(try_handler(E), Ctxt, Ren2, Env2, S4),
+ S6 = count_size(weight('try'), S5),
+ {update_c_try(E, A, Vs1, B, Evs1, H), S6}
+ end.
+
+%% A special case of try-expressions:
+
+i_catch(E, Ctxt, Ren, Env, S) ->
+ %% We cannot propagate application contexts into the catch.
+ {E1, S1} = i(catch_body(E), safe_context(Ctxt), Ren, Env, S),
+ case is_safe(E1) of
+ true ->
+ %% The `catch' wrapper can be dropped in this case.
+ {E1, S1};
+ false ->
+ S2 = count_size(weight('catch'), S1),
+ {update_c_catch(E, E1), S2}
+ end.
+
+%% A receive-expression is very much like a case-expression, with the
+%% difference that we do not have access to a switch expression, since
+%% the value being switched on is taken from the mailbox. The fact that
+%% the receive-expression may iterate over an arbitrary number of
+%% messages is not of interest to us. All we can do here is to visit its
+%% subexpressions, and possibly eliminate definitely unselectable
+%% clauses.
+
+i_receive(E, Ctxt, Ren, Env, S) ->
+ %% We first visit the expiry expression (for value) and the expiry
+ %% body (in the surrounding context).
+ {T, S1} = i(receive_timeout(E), value, Ren, Env, S),
+ {B, S2} = i(receive_action(E), Ctxt, Ren, Env, S1),
+
+ %% Then we visit the clauses. Note that application contexts may not
+ %% in general be propagated into the branches (and the expiry body),
+ %% because the execution of the `receive' may remove a message from
+ %% the mailbox as a side effect; the situation is thus analogous to
+ %% that in a `case' expression.
+ Ctxt1 = safe_context(Ctxt),
+ case i_clauses(receive_clauses(E), Ctxt1, Ren, Env, S2) of
+ {false, {[], _, _, Cs}, S3} ->
+ %% We still have a list of clauses. If the list is empty,
+ %% and the expiry expression is the integer zero, the
+ %% expression reduces to the expiry body.
+ if Cs == [] ->
+ case is_c_int(T) andalso (int_val(T) == 0) of
+ true ->
+ {B, S3};
+ false ->
+ i_receive_1(E, Cs, T, B, S3)
+ end;
+ true ->
+ i_receive_1(E, Cs, T, B, S3)
+ end;
+ {true, {_, _, _, Cs}, S3} ->
+ %% Cs is a single clause that will always be matched (if a
+ %% message exists), but we must keep the `receive' statement
+ %% in order to fetch the message from the mailbox.
+ i_receive_1(E, Cs, T, B, S3)
+ end.
+
+i_receive_1(E, Cs, T, B, S) ->
+ %% Here, we just add the base sizes for the receive-expression
+ %% itself and for each remaining clause; cf. `case'.
+ N = weight('receive') + weight(clause) * length(Cs),
+ {update_c_receive(E, Cs, T, B), count_size(N, S)}.
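+
+%% For instance (sketch): a residual `receive ... after 0 -> Body end'
+%% whose clauses have all been shown to be unselectable reduces to just
+%% `Body' (handled in `i_receive' above), whereas with a non-zero
+%% expiry time the clause-less `receive' must be kept, since the delay
+%% itself is observable.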
+
+%% A module definition is like a `letrec', with some add-ons (export and
+%% attribute declarations) but without an explicit body. Actually, the
+%% exporting of function names has the same effect as if there was a
+%% body consisting of the list of references to the exported functions.
+%% Thus, the exported functions are exactly those which can be
+%% referenced from outside the module.
+
+i_module(E, Ctxt, Ren, Env, S) ->
+ %% Cf. `i_letrec'. Note that we pass a dummy constant value for the
+ %% "body" parameter.
+ {Es, _, Xs1, S1} = i_letrec(module_defs(E), void(),
+ module_exports(E), Ctxt, Ren, Env, S),
+ %% Sanity check:
+ case Es of
+ [] ->
+ report_warning("no function definitions remaining "
+ "in module `~s'.\n",
+ [atom_name(module_name(E))]);
+ _ ->
+ ok
+ end,
+ E1 = update_c_module(E, module_name(E), Xs1, module_attrs(E), Es),
+ {E1, count_size(weight(module), S1)}.
+
+%% Binary-syntax expressions are too complicated to do anything
+%% interesting with here - that is beyond the scope of this program;
+%% also, their construction could have side effects, so even in effect
+%% context we can't remove them. (We don't bother to identify cases of
+%% "safe" unused binaries which could be removed.)
+
+i_binary(E, Ren, Env, S) ->
+ %% Visit the segments for value.
+ {Es, S1} = mapfoldl(fun (E, S) ->
+ i_bitstr(E, Ren, Env, S)
+ end,
+ S, binary_segments(E)),
+ S2 = count_size(weight(binary), S1),
+ {update_c_binary(E, Es), S2}.
+
+i_bitstr(E, Ren, Env, S) ->
+ %% It is not necessary to visit the Unit, Type and Flags fields,
+ %% since these are always literals.
+ {Val, S1} = i(bitstr_val(E), value, Ren, Env, S),
+ {Size, S2} = i(bitstr_size(E), value, Ren, Env, S1),
+ Unit = bitstr_unit(E),
+ Type = bitstr_type(E),
+ Flags = bitstr_flags(E),
+ S3 = count_size(weight(bitstr), S2),
+ {update_c_bitstr(E, Val, Size, Unit, Type, Flags), S3}.
+
+%% This is a simplified version of `i_pattern', for lists of parameter
+%% variables only. It does not modify the state.
+
+i_params([V | Vs], Ren, Env) ->
+ Name = ren__map(var_name(V), Ren),
+ case env__lookup(Name, Env) of
+ {ok, R} ->
+ [ref_to_var(R) | i_params(Vs, Ren, Env)];
+ error ->
+ report_internal_error("variable `~w' not bound "
+ "in pattern.\n", [Name]),
+ exit(error)
+ end;
+i_params([], _, _) ->
+ [].
+
+%% For ordinary patterns, we just visit to rename variables and count
+%% the size/cost. All occurring binding instances of variables should
+%% already have been added to the renaming and environment; however, to
+%% handle the size expressions of binary-syntax patterns, we must pass
+%% the renaming and environment of the containing expression
+
+i_pattern(E, Ren, Env, Ren0, Env0, S) ->
+ case type(E) of
+ var ->
+ %% Count no size.
+ Name = ren__map(var_name(E), Ren),
+ case env__lookup(Name, Env) of
+ {ok, R} ->
+ {ref_to_var(R), S};
+ error ->
+ report_internal_error("variable `~w' not bound "
+ "in pattern.\n", [Name]),
+ exit(error)
+ end;
+ alias ->
+ %% Count no size.
+ V = alias_var(E),
+ Name = ren__map(var_name(V), Ren),
+ case env__lookup(Name, Env) of
+ {ok, R} ->
+ %% Visit the subpattern and recompose.
+ V1 = ref_to_var(R),
+ {P, S1} = i_pattern(alias_pat(E), Ren, Env, Ren0,
+ Env0, S),
+ {update_c_alias(E, V1, P), S1};
+ error ->
+ report_internal_error("variable `~w' not bound "
+ "in pattern.\n", [Name]),
+ exit(error)
+ end;
+ binary ->
+ {Es, S1} = mapfoldl(fun (E, S) ->
+ i_bitstr_pattern(E, Ren, Env,
+ Ren0, Env0, S)
+ end,
+ S, binary_segments(E)),
+ S2 = count_size(weight(binary), S1),
+ {update_c_binary(E, Es), S2};
+ _ ->
+ case is_literal(E) of
+ true ->
+ {E, count_size(weight(literal), S)};
+ false ->
+ {Es1, S1} = mapfoldl(fun (E, S) ->
+ i_pattern(E, Ren, Env,
+ Ren0, Env0,
+ S)
+ end,
+ S, data_es(E)),
+ %% We assume that in general, the elements of the
+ %% constructor will all be fetched.
+ N = weight(data) + length(Es1) * weight(element),
+ S2 = count_size(N, S1),
+ {update_data(E, data_type(E), Es1), S2}
+ end
+ end.
+
+i_bitstr_pattern(E, Ren, Env, Ren0, Env0, S) ->
+ %% It is not necessary to visit the Unit, Type and Flags fields,
+ %% since these are always literals. The Value field is a limited
+ %% pattern - either a literal or an unbound variable. The Size field
+ %% is a limited expression - either a literal or a variable bound in
+ %% the environment of the containing expression.
+ {Val, S1} = i_pattern(bitstr_val(E), Ren, Env, Ren0, Env0, S),
+ {Size, S2} = i(bitstr_size(E), value, Ren0, Env0, S1),
+ Unit = bitstr_unit(E),
+ Type = bitstr_type(E),
+ Flags = bitstr_flags(E),
+ S3 = count_size(weight(bitstr), S2),
+ {update_c_bitstr(E, Val, Size, Unit, Type, Flags), S3}.
+
+
+%% ---------------------------------------------------------------------
+%% Other central inlining functions
+
+%% It is assumed here that `E' is a fun-expression and the context is an
+%% app-structure. If the inlining might be aborted for some reason, a
+%% corresponding catch should have been set up before entering `inline'.
+%%
+%% Note: if the inlined body is a lambda abstraction, and the
+%% surrounding context of the app-context is also an app-context, the
+%% `inlined' flag of the outermost context will be set before that of
+%% the inner context is set. E.g.: `let F = fun (X) -> fun (Y) -> E in
+%% apply apply F(A)(B)' will propagate the body of F, which is a lambda
+%% abstraction, into the outer application context, which will be
+%% inlined to produce expression `E', and the flag of the outer context
+%% will be set. Upon return, the flag of the inner context will also be
+%% set. However, the flags are then tested in innermost-first order.
+%% Thus, if some inlining attempt is aborted, the `inlined' flags of any
+%% nested app-contexts must be cleared.
+%%
+%% This implementation does nothing to handle inlining of calls to
+%% recursive functions in a smart way. This means that as long as the
+%% size and effort counters do not prevent it, the function body will be
+%% inlined (i.e., the first iteration will be unrolled), and the
+%% recursive calls will be residualized.
+
+inline(E, #app{opnds = Opnds, ctxt = Ctxt, loc = L}, Ren, Env, S) ->
+ %% Check that the arities match:
+ Vs = fun_vars(E),
+ if length(Opnds) /= length(Vs) ->
+ report_error("function called with wrong number "
+ "of arguments!\n"),
+ %% TODO: should really just residualise the call...
+ exit(error);
+ true ->
+ ok
+ end,
+ %% Create local bindings for the parameters to their respective
+ %% operand structures from the app-structure, and visit the body in
+ %% the context saved in the structure.
+ {Rs, Ren1, Env1, S1} = bind_locals(Vs, Opnds, Ren, Env, S),
+ {E1, S2} = i(fun_body(E), Ctxt, Ren1, Env1, S1),
+
+ %% Create necessary bindings and/or set flags.
+ {E2, S3} = make_let_bindings(Rs, E1, S2),
+
+ %% Lastly, flag the application as inlined, since the inlining
+ %% attempt was not aborted before we reached this point.
+ {E2, st__set_app_inlined(L, S3)}.
+
+%% For the (possibly renamed) argument variables to an inlined call,
+%% either create `let' bindings for them, if they are still referenced
+%% in the residual expression (in C/Lisp, also if they are assigned to),
+%% or otherwise (if they are not referenced or assigned) mark them for
+%% evaluation for side effects.
+
+make_let_bindings([R | Rs], E, S) ->
+ {E1, S1} = make_let_bindings(Rs, E, S),
+ make_let_binding(R, E1, S1);
+make_let_bindings([], E, S) ->
+ {E, S}.
+
+make_let_binding(R, E, S) ->
+ %% The `referenced' flag is conservatively computed. We therefore
+ %% first check some simple cases where parameter R is definitely not
+ %% referenced in the resulting body E.
+ case is_literal(E) of
+ true ->
+ %% A constant contains no variable references.
+ make_let_binding_1(R, E, S);
+ false ->
+ case is_c_var(E) of
+ true ->
+ case var_name(E) =:= R#ref.name of
+ true ->
+ %% The body is simply the parameter variable
+ %% itself. Visit the operand for value and
+ %% substitute the result for the body.
+ visit_and_count_size(R#ref.opnd, S);
+ false ->
+ %% Not the same variable, so the parameter
+ %% is not referenced at all.
+ make_let_binding_1(R, E, S)
+ end;
+ false ->
+ %% Proceed to check the `referenced' flag.
+ case st__get_var_referenced(R#ref.loc, S) of
+ true ->
+ %% The parameter is probably referenced in
+ %% the residual code (although it might not
+ %% be). Visit the operand for value and
+ %% create a let-binding.
+ {E1, S1} = visit_and_count_size(R#ref.opnd,
+ S),
+ S2 = count_size(weight('let'), S1),
+ {c_let([ref_to_var(R)], E1, E), S2};
+ false ->
+ %% The parameter is definitely not
+ %% referenced.
+ make_let_binding_1(R, E, S)
+ end
+ end
+ end.
+
+%% This marks the operand for evaluation for effect.
+
+make_let_binding_1(R, E, S) ->
+ Opnd = R#ref.opnd,
+ {E, st__set_opnd_effect(Opnd#opnd.loc, S)}.
+
+%% Here, `R' is the ref-structure which is the target of the copy
+%% propagation, and `Opnd' is a visited operand structure, to be
+%% propagated through `R' if possible - if not, `R' is residualised.
+%% `Opnd' is normally the operand that `R' is bound to, and `E' is the
+%% result of visiting `Opnd' for value; we pass this as an argument so
+%% we don't have to fetch it multiple times (because we don't have
+%% constant time access).
+%%
+%% We also pass the environment of the site of the variable reference,
+%% for use when inlining a propagated fun-expression. In the original
+%% algorithm by Waddell, the environment used for inlining such cases is
+%% the identity mapping, because the fun-expression body has already
+%% been visited for value, and their algorithm combines renaming of
+%% source-code variables with the looking up of information about
+%% residual-code variables. We, however, need to check the environment
+%% of the call site when creating new non-shadowed variables, but we
+%% must avoid repeated renaming. We therefore separate the renaming and
+%% the environment (as in the renaming algorithm of Peyton-Jones and
+%% Marlow). This also makes our implementation more general, compared to
+%% the original algorithm, because we do not give up on propagating
+%% variables that were free in the fun-body.
+%%
+%% Example:
+%%
+%% let F = fun (X) -> {'foo', X} in
+%% let G = fun (H) -> apply H(F) % F is free in the fun G
+%% in apply G(fun (F) -> apply F(42))
+%% =>
+%% let F = fun (X) -> {'foo', X} in
+%% apply (fun (H) -> apply H(F))(fun (F) -> apply F(42))
+%% =>
+%% let F = fun (X) -> {'foo', X} in
+%% apply (fun (F) -> apply F(42))(F)
+%% =>
+%% let F = fun (X) -> {'foo', X} in
+%% apply F(42)
+%% =>
+%%	apply (fun (X) -> {'foo', X})(42)
+%% =>
+%% {'foo', 42}
+%%
+%% The original algorithm would give up at stage 4, because F was free
+%% in the propagated fun-expression. Our version inlines this example
+%% completely.
+
+copy(R, Opnd, E, Ctxt, Env, S) ->
+ case is_c_var(E) of
+ true ->
+ %% The operand reduces to another variable - get its
+ %% ref-structure and attempt to propagate further.
+ copy_var(env__get(var_name(E), Opnd#opnd.env), Ctxt, Env,
+ S);
+ false ->
+ %% Apart from variables and functional values (the latter
+ %% are handled by `copy_1' below), only constant literals
+ %% are copyable in general; other things, including e.g.
+ %% tuples `{foo, X}', could cause duplication of work, and
+ %% are not copy propagated.
+ case is_literal(E) of
+ true ->
+ {E, count_size(weight(literal), S)};
+ false ->
+ copy_1(R, Opnd, E, Ctxt, Env, S)
+ end
+ end.
+
+copy_var(R, Ctxt, Env, S) ->
+ %% (In Lisp or C, if this other variable might be assigned to, we
+ %% should residualize the "parent" instead, so we don't bypass any
+ %% destructive updates.)
+ case R#ref.opnd of
+ undefined ->
+ %% This variable is not bound to an expression, so just
+ %% residualize it.
+ residualize_var(R, S);
+ Opnd ->
+ %% Note that because operands are always visited before
+ %% copied, all copyable operand expressions will be
+ %% propagated through any number of bindings. If `R' was
+ %% bound to a constant literal, we would never have reached
+ %% this point.
+ case st__lookup_opnd_cache(Opnd#opnd.loc, S) of
+ error ->
+ %% The result for this operand is not yet ready
+ %% (which should mean that it is a recursive
+ %% reference). Thus, we must residualise the
+ %% variable.
+ residualize_var(R, S);
+ {ok, #cache{expr = E1}} ->
+ %% The result for the operand is ready, so we can
+ %% proceed to propagate it.
+ copy_1(R, Opnd, E1, Ctxt, Env, S)
+ end
+ end.
+
+copy_1(R, Opnd, E, Ctxt, Env, S) ->
+ %% Fun-expression (lambdas) are a bit special; they are copyable,
+ %% but should preferably not be duplicated, so they should not be
+ %% copy propagated except into application contexts, where they can
+ %% be inlined.
+ case is_c_fun(E) of
+ true ->
+ case Ctxt of
+ #app{} ->
+ %% First test if the operand is "outer-pending"; if
+ %% so, don't inline.
+ case st__test_outer_pending(Opnd#opnd.loc, S) of
+ false ->
+ copy_inline(R, Opnd, E, Ctxt, Env, S);
+ true ->
+ %% Cyclic reference forced inlining to stop
+ %% (avoiding infinite unfolding).
+ residualize_var(R, S)
+ end;
+ _ ->
+ residualize_var(R, S)
+ end;
+ false ->
+ %% We have no other cases to handle here
+ residualize_var(R, S)
+ end.
+
+%% This inlines a function value that was propagated to an application
+%% context. The inlining is done with an identity renaming (since the
+%% expression is already visited) but in the environment of the call
+%% site (which is OK because of the no-shadowing strategy for renaming,
+%% and because the domain of our environments are the residual-program
+%% variables instead of the source-program variables). Note that we must
+%% first set the "outer-pending" flag, and clear it afterwards.
+
+copy_inline(R, Opnd, E, Ctxt, Env, S) ->
+ S1 = st__mark_outer_pending(Opnd#opnd.loc, S),
+ case catch {ok, copy_inline_1(R, E, Ctxt, Env, S1)} of
+ {ok, {E1, S2}} ->
+ {E1, st__clear_outer_pending(Opnd#opnd.loc, S2)};
+ {'EXIT', X} ->
+ exit(X);
+ X ->
+ %% If we use destructive update for the `outer-pending'
+ %% flag, we must make sure to clear it upon a nonlocal
+ %% return.
+ st__clear_outer_pending(Opnd#opnd.loc, S1),
+ throw(X)
+ end.
+
+%% If the current effort counter was passive, we use a new active effort
+%% counter with the inherited limit for this particular inlining.
+
+copy_inline_1(R, E, Ctxt, Env, S) ->
+ case effort_is_active(S) of
+ true ->
+ copy_inline_2(R, E, Ctxt, Env, S);
+ false ->
+ S1 = new_active_effort(get_effort_limit(S), S),
+ case catch {ok, copy_inline_2(R, E, Ctxt, Env, S1)} of
+ {ok, {E1, S2}} ->
+ %% Revert to the old effort counter.
+ {E1, revert_effort(S, S2)};
+ {counter_exceeded, effort, _} ->
+ %% Aborted this inlining attempt because too much
+ %% effort was spent. Residualize the variable and
+ %% revert to the previous state.
+ residualize_var(R, S);
+ {'EXIT', X} ->
+ exit(X);
+ X ->
+ throw(X)
+ end
+ end.
+
+%% Regardless of whether the current size counter is active or not, we
+%% use a new active size counter for each inlining. If the current
+%% counter was passive, the new counter gets the inherited size limit;
+%% if it was active, the size limit of the new counter will be equal to
+%% the remaining budget of the current counter (which itself is not
+%% affected by the inlining). This distributes the size budget more
+%% evenly over "inlinings within inlinings", so that the whole size
+%% budget is not spent on the first few call sites (in an inlined
+%% function body) forcing the remaining call sites to be residualised.
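+%%
+%% For example (illustrative numbers): if the current size counter is
+%% active with limit 100 and 60 units already counted, inlining a call
+%% with two operands starts a fresh active counter with limit
+%% (100 - 60) + apply_size(2); if the current counter is passive, the
+%% fresh limit is simply 100 + apply_size(2).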
+
+copy_inline_2(R, E, Ctxt, Env, S) ->
+ Limit = case size_is_active(S) of
+ true ->
+ get_size_limit(S) - get_size_value(S);
+ false ->
+ get_size_limit(S)
+ end,
+ %% Add the cost of the application to the new size limit, so we
+ %% always inline functions that are small enough, even if `Limit' is
+ %% close to zero at this point. (This is an extension to the
+ %% original algorithm.)
+ S1 = new_active_size(Limit + apply_size(length(Ctxt#app.opnds)), S),
+ case catch {ok, inline(E, Ctxt, ren__identity(), Env, S1)} of
+ {ok, {E1, S2}} ->
+ %% Revert to the old size counter.
+ {E1, revert_size(S, S2)};
+ {counter_exceeded, size, S2} ->
+ %% Aborted this inlining attempt because it got too big.
+ %% Residualize the variable and revert to the old size
+ %% counter. (It is important that we do not also revert the
+ %% effort counter here. Because the effort and size counters
+ %% are always set up together, we know that the effort
+ %% counter returned in S2 is the same that was passed to
+ %% `inline'.)
+ S3 = revert_size(S, S2),
+ %% If we use destructive update for the `inlined' flag, we
+ %% must make sure to clear the flags of any nested
+ %% app-contexts upon aborting; see `inline' for details.
+ reset_nested_apps(Ctxt, S3), % for effect
+ residualize_var(R, S3);
+ {'EXIT', X} ->
+ exit(X);
+ X ->
+ throw(X)
+ end.
+
+reset_nested_apps(#app{ctxt = Ctxt, loc = L}, S) ->
+ reset_nested_apps(Ctxt, st__clear_app_inlined(L, S));
+reset_nested_apps(_, S) ->
+ S.
+
+
+%% ---------------------------------------------------------------------
+%% Support functions
+
+new_var(Env) ->
+ Name = env__new_vname(Env),
+ c_var(Name).
+
+residualize_var(R, S) ->
+ S1 = count_size(weight(var), S),
+ {ref_to_var(R), st__set_var_referenced(R#ref.loc, S1)}.
+
+%% This function returns the value-producing subexpression of any
+%% expression. (Except for sequencing expressions, this is the
+%% expression itself.)
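+%%
+%% For example, since `make_seq' (below) keeps sequences nested in the
+%% first position, the result of `do (do E1 E2) E3' is `E3'.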
+
+result(E) ->
+ case is_c_seq(E) of
+ true ->
+ %% Also see `make_seq', which is used in all places to build
+ %% sequences so that they are always nested in the first
+ %% position.
+ seq_body(E);
+ false ->
+ E
+ end.
+
+%% This function rewrites E to `do A1 E' if A is `do A1 A2', and
+%% otherwise returns E unchanged.
+
+hoist_effects(A, E) ->
+ case type(A) of
+ seq -> make_seq(seq_arg(A), E);
+ _ -> E
+ end.
+
+%% This "build sequencing expression" operation assures that sequences
+%% are always nested in the first position, which makes it easy to find
+%% the actual value-producing expression of a sequence (cf. `result').
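+%%
+%% A few illustrative cases, assuming `f/0' is not known to be safe and
+%% writing the nesting with parentheses for clarity:
+%%
+%%	make_seq('true', E)               =>  E
+%%	make_seq(do apply f/0() 'ok', E)  =>  do apply f/0() E
+%%	make_seq(E1, do E2 E3)            =>  do (do E1 E2) E3
+%%	                                      (E1 unsafe, not a sequence)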
+
+make_seq(E1, E2) ->
+ case is_safe(E1) of
+ true ->
+ %% The first expression can safely be dropped.
+ E2;
+ false ->
+ %% If `E1' is a sequence whose final expression has no side
+ %% effects, then we can lose *that* expression when we
+ %% compose the new sequence, since its value will not be
+ %% used.
+ E3 = case is_c_seq(E1) of
+ true ->
+ case is_safe(seq_body(E1)) of
+ true ->
+ %% Drop the final expression.
+ seq_arg(E1);
+ false ->
+ E1
+ end;
+ false ->
+ E1
+ end,
+ case is_c_seq(E2) of
+ true ->
+ %% `E2' is a sequence (E2' E2''), so we must
+ %% rearrange the nesting to ((E1, E2') E2''), to
+ %% preserve the invariant. Annotations on `E2' are
+ %% lost.
+ c_seq(c_seq(E3, seq_arg(E2)), seq_body(E2));
+ false ->
+ c_seq(E3, E2)
+ end
+ end.
+
+%% Currently, safe expressions include variables, lambda expressions,
+%% constructors with safe subexpressions (this includes atoms, integers,
+%% empty lists, etc.), seq-, let- and letrec-expressions with safe
+%% subexpressions, try- and catch-expressions with safe subexpressions
+%% and calls to safe functions with safe argument subexpressions.
+%% Binaries seem too tricky to be considered.
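+%%
+%% For example, `{X, 'foo'}' and `let Y = X in {'bar', Y}' are safe,
+%% while `apply f/0()' and any call whose module or function position
+%% is not an atom literal are not.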
+
+is_safe(E) ->
+ case is_data(E) of
+ true ->
+ is_safe_list(data_es(E));
+ false ->
+ case type(E) of
+ var ->
+ true;
+ 'fun' ->
+ true;
+ values ->
+ is_safe_list(values_es(E));
+ 'seq' ->
+ case is_safe(seq_arg(E)) of
+ true ->
+ is_safe(seq_body(E));
+ false ->
+ false
+ end;
+ 'let' ->
+ case is_safe(let_arg(E)) of
+ true ->
+ is_safe(let_body(E));
+ false ->
+ false
+ end;
+ letrec ->
+ is_safe(letrec_body(E));
+ 'try' ->
+ %% If the argument expression is not safe, it could
+ %% be modifying the state; thus, even if the body is
+ %% safe, the try-expression as a whole would not be.
+ %% If the argument is safe, the handler is not used.
+ case is_safe(try_arg(E)) of
+ true ->
+ is_safe(try_body(E));
+ false ->
+ false
+ end;
+ 'catch' ->
+ is_safe(catch_body(E));
+ call ->
+ M = call_module(E),
+ F = call_name(E),
+ case is_c_atom(M) and is_c_atom(F) of
+ true ->
+ As = call_args(E),
+ case is_safe_list(As) of
+ true ->
+ is_safe_call(atom_val(M),
+ atom_val(F),
+ length(As));
+ false ->
+ false
+ end;
+ false ->
+ false
+ end;
+ _ ->
+ false
+ end
+ end.
+
+is_safe_list([E | Es]) ->
+ case is_safe(E) of
+ true ->
+ is_safe_list(Es);
+ false ->
+ false
+ end;
+is_safe_list([]) ->
+ true.
+
+is_safe_call(M, F, A) ->
+ erl_bifs:is_safe(M, F, A).
+
+%% When setting up local variables, we only create new names if we have
+%% to, according to the "no-shadowing" strategy.
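+%%
+%% For example, if `X' is not already bound in `Env' it keeps its name
+%% and is mapped to itself in the renaming; if it is bound, it gets a
+%% fresh name from the environment (for a function variable such as
+%% `{'f', 2}', a fresh name of the form `{'f_N', 2}' for some integer
+%% N, preserving the arity).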
+
+make_locals(Vs, Ren, Env) ->
+ make_locals(Vs, [], Ren, Env).
+
+make_locals([V | Vs], As, Ren, Env) ->
+ Name = var_name(V),
+ case env__is_defined(Name, Env) of
+ false ->
+ %% The variable need not be renamed. Just make sure that the
+ %% renaming will map it to itself.
+ Name1 = Name,
+ Ren1 = ren__add_identity(Name, Ren);
+ true ->
+ %% The variable must be renamed to maintain the no-shadowing
+ %% invariant. Do the right thing for function variables.
+ Name1 = case Name of
+ {A, N} ->
+ env__new_fname(A, N, Env);
+ _ ->
+ env__new_vname(Env)
+ end,
+ Ren1 = ren__add(Name, Name1, Ren)
+ end,
+ %% This temporary binding is added for correct new-key generation.
+ Env1 = env__bind(Name1, dummy, Env),
+ make_locals(Vs, [Name1 | As], Ren1, Env1);
+make_locals([], As, Ren, Env) ->
+ {reverse(As), Ren, Env}.
+
+%% This adds let-bindings for the source code variables in `Es' to the
+%% environment `Env'.
+%%
+%% Note that we always assign a new state location for the
+%% residual-program variable, since we cannot know when a location for a
+%% particular variable in the source code can be reused.
+
+bind_locals(Vs, Ren, Env, S) ->
+ Opnds = lists:duplicate(length(Vs), undefined),
+ bind_locals(Vs, Opnds, Ren, Env, S).
+
+bind_locals(Vs, Opnds, Ren, Env, S) ->
+ {Ns, Ren1, Env1} = make_locals(Vs, Ren, Env),
+ {Rs, Env2, S1} = bind_locals_1(Ns, Opnds, [], Env1, S),
+ {Rs, Ren1, Env2, S1}.
+
+%% Note that the `Vs' are currently not used for anything except the
+%% number of variables. If we were maintaining "source-referenced"
+%% flags, then the flag in the new variable should be initialized to the
+%% current value of the (residual-) referenced-flag of the "parent".
+
+bind_locals_1([N | Ns], [Opnd | Opnds], Rs, Env, S) ->
+ {R, S1} = new_ref(N, Opnd, S),
+ Env1 = env__bind(N, R, Env),
+ bind_locals_1(Ns, Opnds, [R | Rs], Env1, S1);
+bind_locals_1([], [], Rs, Env, S) ->
+ {lists:reverse(Rs), Env, S}.
+
+new_refs(Ns, Opnds, S) ->
+ new_refs(Ns, Opnds, [], S).
+
+new_refs([N | Ns], [Opnd | Opnds], Rs, S) ->
+ {R, S1} = new_ref(N, Opnd, S),
+ new_refs(Ns, Opnds, [R | Rs], S1);
+new_refs([], [], Rs, S) ->
+ {lists:reverse(Rs), S}.
+
+new_ref(N, Opnd, S) ->
+ {L, S1} = st__new_ref_loc(S),
+ {#ref{name = N, opnd = Opnd, loc = L}, S1}.
+
+%% This adds recursive bindings for the source code variables in `Es' to
+%% the environment `Env'. Note that recursive binding of a set of
+%% variables is an atomic operation on the environment - they cannot be
+%% added one at a time.
+
+bind_recursive(Vs, Opnds, Ren, Env, S) ->
+ {Ns, Ren1, Env1} = make_locals(Vs, Ren, Env),
+ {Rs, S1} = new_refs(Ns, Opnds, S),
+
+ %% When this fun-expression is evaluated, it updates the operand
+ %% structure in the ref-structure to contain the recursively defined
+ %% environment and the correct renaming.
+ Fun = fun (R, Env) ->
+ Opnd = R#ref.opnd,
+ R#ref{opnd = Opnd#opnd{ren = Ren1, env = Env}}
+ end,
+ {Rs, Ren1, env__bind_recursive(Ns, Rs, Fun, Env1), S1}.
+
+safe_context(Ctxt) ->
+ case Ctxt of
+ #app{} ->
+ value;
+ _ ->
+ Ctxt
+ end.
+
+%% Note that the name of a variable encodes its type: a "plain" variable
+%% or a function variable. The latter kind also contains an arity number
+%% which should be preserved upon renaming.
+
+ref_to_var(#ref{name = Name}) ->
+ %% If we were maintaining "source-referenced" flags, the annotation
+ %% `add_ann([#source_ref{loc = L}], E)' should also be done here, to
+ %% make the algorithm reapplicable. This is however not necessary
+ %% since there are no destructive variable assignments in Erlang.
+ c_var(Name).
+
+%% Including the effort counter of the call site assures that the cost
+%% of processing an operand via `visit' is charged to the correct
+%% counter. In particular, if the effort counter of the call site was
+%% passive, the operands will also be processed with a passive counter.
+
+make_opnd(E, Ren, Env, S) ->
+ {L, S1} = st__new_opnd_loc(S),
+ C = st__get_effort(S1),
+ Opnd = #opnd{expr = E, ren = Ren, env = Env, loc = L, effort = C},
+ {Opnd, S1}.
+
+keep_referenced(Rs, S) ->
+ [R || R <- Rs, st__get_var_referenced(R#ref.loc, S)].
+
+residualize_operands(Opnds, E, S) ->
+ foldr(fun (Opnd, {E, S}) -> residualize_operand(Opnd, E, S) end,
+ {E, S}, Opnds).
+
+%% This is the only case where an operand expression can be visited in
+%% `effect' context instead of `value' context.
+
+residualize_operand(Opnd, E, S) ->
+ case st__get_opnd_effect(Opnd#opnd.loc, S) of
+ true ->
+ %% The operand has not been visited, so we do that now, but
+	    %% in `effect' context. (Waddell's algorithm does some stuff
+ %% here to account specially for the operand size, which
+ %% appears unnecessary.)
+ {E1, S1} = i(Opnd#opnd.expr, effect, Opnd#opnd.ren,
+ Opnd#opnd.env, S),
+ {make_seq(E1, E), S1};
+ false ->
+ {E, S}
+ end.
+
+%% The `visit' function always visits the operand expression in `value'
+%% context (`residualize_operand' visits an unreferenced operand
+%% expression in `effect' context when necessary). A new passive size
+%% counter is used for visiting the operand, the final value of which is
+%% then cached along with the resulting expression.
+%%
+%% Note that the effort counter of the call site, included in the
+%% operand structure, is not a shared object. Thus, the effort budget is
+%% actually reused over all occurrences of the operands of a single
+%% application. This does not appear to be a problem; just a
+%% modification of the algorithm.
+
+visit(Opnd, S) ->
+ {C, S1} = visit_1(Opnd, S),
+ {C#cache.expr, S1}.
+
+visit_and_count_size(Opnd, S) ->
+ {C, S1} = visit_1(Opnd, S),
+ {C#cache.expr, count_size(C#cache.size, S1)}.
+
+visit_1(Opnd, S) ->
+ case st__lookup_opnd_cache(Opnd#opnd.loc, S) of
+ error ->
+ %% Use a new, passive, size counter for visiting operands,
+ %% and use the effort counter of the context of the operand.
+ %% It turns out that if the latter is active, it must be the
+ %% same object as the one currently used, and if it is
+ %% passive, it does not matter if it is the same object as
+ %% any other counter.
+ Effort = Opnd#opnd.effort,
+ Active = counter__is_active(Effort),
+ S1 = case Active of
+ true ->
+ S; % don't change effort counter
+ false ->
+ st__set_effort(Effort, S)
+ end,
+ S2 = new_passive_size(get_size_limit(S1), S1),
+
+ %% Visit the expression and cache the result, along with the
+ %% final value of the size counter.
+ {E, S3} = i(Opnd#opnd.expr, value, Opnd#opnd.ren,
+ Opnd#opnd.env, S2),
+ Size = get_size_value(S3),
+ C = #cache{expr = E, size = Size},
+ S4 = revert_size(S, st__set_opnd_cache(Opnd#opnd.loc, C,
+ S3)),
+ case Active of
+ true ->
+ {C, S4}; % keep using the same effort counter
+ false ->
+ {C, revert_effort(S, S4)}
+ end;
+ {ok, C} ->
+ {C, S}
+ end.
+
+%% Create a pattern matching template for an expression. A template
+%% contains only data constructors (including atomic ones) and
+%% variables, and compound literals are not folded into a single node.
+%% Each node in the template is annotated with the variable which holds
+%% the corresponding subexpression; these are new, unique variables not
+%% existing in the given `Env'. Returns `{Template, Variables, NewEnv}',
+%% where `Variables' is the list of all variables corresponding to nodes
+%% in the template *listed in reverse dependency order*, and `NewEnv' is
+%% `Env' augmented with mappings from the variable names to
+%% subexpressions of `E' (not #ref{} structures!) rewritten so that no
+%% computations are duplicated. `Variables' is guaranteed to be nonempty
+%% - at least the root node will always be bound to a new variable.
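+%%
+%% For example, for the expression `{'foo', apply f/0()}' the template
+%% is roughly a tuple skeleton `{V1, V2}' annotated with a fresh
+%% variable V3, the variable list is `[V3, V2, V1]', and the new
+%% environment maps V1 to 'foo', V2 to the application and V3 to the
+%% tuple `{V1, V2}'.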
+
+make_template(E, Env) ->
+ make_template(E, [], Env).
+
+make_template(E, Vs0, Env0) ->
+ case is_data(E) of
+ true ->
+ {Ts, {Vs1, Env1}} = mapfoldl(
+ fun (E, {Vs0, Env0}) ->
+ {T, Vs1, Env1} =
+ make_template(E, Vs0,
+ Env0),
+ {T, {Vs1, Env1}}
+ end,
+ {Vs0, Env0}, data_es(E)),
+ T = make_data_skel(data_type(E), Ts),
+ E1 = update_data(E, data_type(E),
+ [hd(get_ann(T)) || T <- Ts]),
+ V = new_var(Env1),
+ Env2 = env__bind(var_name(V), E1, Env1),
+ {set_ann(T, [V]), [V | Vs1], Env2};
+ false ->
+ case type(E) of
+ seq ->
+ %% For a sequencing, we can rebind the variable used
+ %% for the body, and pass on the template as it is.
+ {T, Vs1, Env1} = make_template(seq_body(E), Vs0,
+ Env0),
+ V = var_name(hd(get_ann(T))),
+ E1 = update_c_seq(E, seq_arg(E), env__get(V, Env1)),
+ Env2 = env__bind(V, E1, Env1),
+ {T, Vs1, Env2};
+ _ ->
+ V = new_var(Env0),
+ Env1 = env__bind(var_name(V), E, Env0),
+ {set_ann(V, [V]), [V | Vs0], Env1}
+ end
+ end.
+
+%% Two clauses are equivalent if their bodies are equivalent expressions
+%% given that the respective pattern variables are local.
+
+equivalent_clauses([]) ->
+ true;
+equivalent_clauses([C | Cs]) ->
+ Env = cerl_trees:variables(c_values(clause_pats(C))),
+ equivalent_clauses_1(clause_body(C), Cs, Env).
+
+equivalent_clauses_1(E, [C | Cs], Env) ->
+ Env1 = cerl_trees:variables(c_values(clause_pats(C))),
+ case equivalent(E, clause_body(C), ordsets:union(Env, Env1)) of
+ true ->
+ equivalent_clauses_1(E, Cs, Env);
+ false ->
+ false
+ end;
+equivalent_clauses_1(_, [], _Env) ->
+ true.
+
+%% Two expressions are equivalent if and only if they yield the same
+%% value and have the same side effects in the same order. Currently, we
+%% only accept equality between constructors (constants) and nonlocal
+%% variables, since this should cover most cases of interest. If a
+%% variable is locally bound in one expression, it cannot be equivalent
+%% to one with the same name in the other expression, so we need not
+%% keep track of two environments.
+
+equivalent(E1, E2, Env) ->
+ case is_data(E1) of
+ true ->
+ case is_data(E2) of
+ true ->
+ T1 = {data_type(E1), data_arity(E1)},
+ T2 = {data_type(E2), data_arity(E2)},
+ %% Note that we must test for exact equality.
+ if T1 =:= T2 ->
+ equivalent_lists(data_es(E1), data_es(E2),
+ Env);
+ true ->
+ false
+ end;
+ false ->
+ false
+ end;
+ false ->
+ case type(E1) of
+ var ->
+ case is_c_var(E2) of
+ true ->
+ N1 = var_name(E1),
+ N2 = var_name(E2),
+ if N1 =:= N2 ->
+ not ordsets:is_element(N1, Env);
+ true ->
+ false
+ end;
+ false ->
+ false
+ end;
+ _ ->
+ %% Other constructs are not being considered.
+ false
+ end
+ end.
+
+equivalent_lists([E1 | Es1], [E2 | Es2], Env) ->
+ equivalent(E1, E2, Env) and equivalent_lists(Es1, Es2, Env);
+equivalent_lists([], [], _) ->
+ true;
+equivalent_lists(_, _, _) ->
+ false.
+
+%% Return `false' or `{true, Expr}'. The environment is
+%% passed for new-variable generation.
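+%%
+%% Some illustrative reductions, where X, Y and Z are variables and
+%% `f/0' is an unknown function (`make_seq' drops the effect part again
+%% whenever it is safe):
+%%
+%%	erlang:hd([X | apply f/0()])     =>  do apply f/0() X
+%%	erlang:length([X, Y])            =>  2
+%%	erlang:element(2, {X, Y, Z})     =>  Y
+%%	erlang:setelement(2, {X, Y}, Z)  =>  {X, Z}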
+
+reduce_bif_call(M, F, As, Env) ->
+ reduce_bif_call_1(M, F, length(As), As, Env).
+
+reduce_bif_call_1(erlang, element, 2, [X, Y], _Env) ->
+ case is_c_int(X) and is_c_tuple(Y) of
+ true ->
+ %% We are free to change the relative evaluation order of
+ %% the elements, so lifting out a particular element is OK.
+ T = list_to_tuple(tuple_es(Y)),
+ N = int_val(X),
+ if integer(N), N > 0, N =< size(T) ->
+ E = element(N, T),
+ Es = tuple_to_list(setelement(N, T, void())),
+ {true, make_seq(c_tuple(Es), E)};
+ true ->
+ false
+ end;
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, hd, 1, [X], _Env) ->
+ case is_c_cons(X) of
+ true ->
+ %% Cf. `element/2' above.
+ {true, make_seq(cons_tl(X), cons_hd(X))};
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, length, 1, [X], _Env) ->
+ case is_c_list(X) of
+ true ->
+ %% Cf. `erlang:size/1' below.
+ {true, make_seq(X, c_int(list_length(X)))};
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, list_to_tuple, 1, [X], _Env) ->
+ case is_c_list(X) of
+ true ->
+ %% This does not actually preserve all the evaluation order
+ %% constraints of the list, but I don't imagine that it will
+ %% be a problem.
+ {true, c_tuple(list_elements(X))};
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, setelement, 3, [X, Y, Z], Env) ->
+ case is_c_int(X) and is_c_tuple(Y) of
+ true ->
+ %% Here, unless `Z' is a simple expression, we must bind it
+ %% to a new variable, because in that case, `Z' must be
+ %% evaluated before any part of `Y'.
+ T = list_to_tuple(tuple_es(Y)),
+ N = int_val(X),
+ if integer(N), N > 0, N =< size(T) ->
+ E = element(N, T),
+ case is_simple(Z) of
+ true ->
+ Es = tuple_to_list(setelement(N, T, Z)),
+ {true, make_seq(E, c_tuple(Es))};
+ false ->
+ V = new_var(Env),
+ Es = tuple_to_list(setelement(N, T, V)),
+ E1 = make_seq(E, c_tuple(Es)),
+ {true, c_let([V], Z, E1)}
+ end;
+ true ->
+ false
+ end;
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, size, 1, [X], _Env) ->
+ case is_c_tuple(X) of
+ true ->
+ %% Just evaluate the tuple for effect and use the size (the
+ %% arity) as the result.
+ {true, make_seq(X, c_int(tuple_arity(X)))};
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, tl, 1, [X], _Env) ->
+ case is_c_cons(X) of
+ true ->
+ %% Cf. `element/2' above.
+ {true, make_seq(cons_hd(X), cons_tl(X))};
+ false ->
+ false
+ end;
+reduce_bif_call_1(erlang, tuple_to_list, 1, [X], _Env) ->
+ case is_c_tuple(X) of
+ true ->
+ %% This actually introduces slightly stronger constraints on
+ %% the evaluation order of the subexpressions.
+ {true, make_list(tuple_es(X))};
+ false ->
+ false
+ end;
+reduce_bif_call_1(_M, _F, _A, _As, _Env) ->
+ false.
+
+effort_is_active(S) ->
+ counter__is_active(st__get_effort(S)).
+
+size_is_active(S) ->
+ counter__is_active(st__get_size(S)).
+
+get_effort_limit(S) ->
+ counter__limit(st__get_effort(S)).
+
+new_active_effort(Limit, S) ->
+ st__set_effort(counter__new_active(Limit), S).
+
+revert_effort(S1, S2) ->
+ st__set_effort(st__get_effort(S1), S2).
+
+new_active_size(Limit, S) ->
+ st__set_size(counter__new_active(Limit), S).
+
+new_passive_size(Limit, S) ->
+ st__set_size(counter__new_passive(Limit), S).
+
+revert_size(S1, S2) ->
+ st__set_size(st__get_size(S1), S2).
+
+count_effort(N, S) ->
+ C = st__get_effort(S),
+ C1 = counter__add(N, C, effort, S),
+ case debug_counters() of
+ true ->
+ case counter__is_active(C1) of
+ true ->
+ V = counter__value(C1),
+ case V > get(counter_effort_max) of
+ true ->
+ put(counter_effort_max, V);
+ false ->
+ ok
+ end;
+ false ->
+ ok
+ end;
+ _ ->
+ ok
+ end,
+ st__set_effort(C1, S).
+
+count_size(N, S) ->
+ C = st__get_size(S),
+ C1 = counter__add(N, C, size, S),
+ case debug_counters() of
+ true ->
+ case counter__is_active(C1) of
+ true ->
+ V = counter__value(C1),
+ case V > get(counter_size_max) of
+ true ->
+ put(counter_size_max, V);
+ false ->
+ ok
+ end;
+ false ->
+ ok
+ end;
+ _ ->
+ ok
+ end,
+ st__set_size(C1, S).
+
+get_size_value(S) ->
+ counter__value(st__get_size(S)).
+
+get_size_limit(S) ->
+ counter__limit(st__get_size(S)).
+
+kill_id_anns([{'id',_} | As]) ->
+ kill_id_anns(As);
+kill_id_anns([A | As]) ->
+ [A | kill_id_anns(As)];
+kill_id_anns([]) ->
+ [].
+
+
+%% =====================================================================
+%% General utilities
+
+max(X, Y) when X > Y -> X;
+max(_, Y) -> Y.
+
+%% The atom `ok' is widely used in Erlang for "void" values.
+
+void() -> abstract(ok).
+
+is_simple(E) ->
+ case type(E) of
+ literal -> true;
+ var -> true;
+ 'fun' -> true;
+ _ -> false
+ end.
+
+get_components(N, E) ->
+ case type(E) of
+ values ->
+ Es = values_es(E),
+ if length(Es) == N ->
+ {true, Es};
+ true ->
+ false
+ end;
+ _ when N == 1 ->
+ {true, [E]};
+ _ ->
+ false
+ end.
+
+all_static([E | Es]) ->
+ case is_literal(result(E)) of
+ true ->
+ all_static(Es);
+ false ->
+ false
+ end;
+all_static([]) ->
+ true.
+
+set_clause_bodies([C | Cs], B) ->
+ [update_c_clause(C, clause_pats(C), clause_guard(C), B)
+ | set_clause_bodies(Cs, B)];
+set_clause_bodies([], _) ->
+ [].
+
+filename([C | T]) when integer(C), C > 0, C =< 255 ->
+ [C | filename(T)];
+filename([H|T]) ->
+ filename(H) ++ filename(T);
+filename([]) ->
+ [];
+filename(N) when atom(N) ->
+ atom_to_list(N);
+filename(N) ->
+ report_error("bad filename: `~P'.", [N, 25]),
+ exit(error).
+
+
+%% =====================================================================
+%% Abstract datatype: renaming()
+
+ren__identity() ->
+ dict:new().
+
+ren__add(X, Y, Ren) ->
+ dict:store(X, Y, Ren).
+
+ren__map(X, Ren) ->
+ case dict:find(X, Ren) of
+ {ok, Y} ->
+ Y;
+ error ->
+ X
+ end.
+
+ren__add_identity(X, Ren) ->
+ dict:erase(X, Ren).
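+
+%% For example, `ren__map(x, ren__add(x, y, ren__identity()))' yields
+%% `y', while `ren__map(z, ren__identity())' yields `z'; names without
+%% an explicit mapping are mapped to themselves.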
+
+
+%% =====================================================================
+%% Abstract datatype: environment()
+
+env__empty() ->
+ rec_env:empty().
+
+env__bind(Key, Val, Env) ->
+ rec_env:bind(Key, Val, Env).
+
+%% `Ks' and `Vs' should be lists of keys and values of equal length,
+%% and `F' should have type `(Val, Env) -> T', mapping a value together
+%% with the recursive environment itself to some term `T' to be
+%% returned when the entry is looked up.
+
+env__bind_recursive(Ks, Vs, F, Env) ->
+ rec_env:bind_recursive(Ks, Vs, F, Env).
+
+env__lookup(Key, Env) ->
+ rec_env:lookup(Key, Env).
+
+env__get(Key, Env) ->
+ rec_env:get(Key, Env).
+
+env__is_defined(Key, Env) ->
+ rec_env:is_defined(Key, Env).
+
+env__new_vname(Env) ->
+ rec_env:new_key(Env).
+
+env__new_fname(A, N, Env) ->
+ rec_env:new_key(fun (X) ->
+ S = integer_to_list(X),
+ {list_to_atom(atom_to_list(A) ++ "_" ++ S),
+ N}
+ end, Env).
+
+
+%% =====================================================================
+%% Abstract datatype: state()
+
+-record(state, {free, % next free location
+ size, % size counter
+ effort, % effort counter
+ cache, % operand expression cache
+ var_flags, % flags for variables (#ref-structures)
+ opnd_flags, % flags for operands
+ app_flags}). % flags for #app-structures
+
+%% Note that we do not have a `var_assigned' flag, since there is no
+%% destructive assignment in Erlang. In the original algorithm, the
+%% "residual-referenced"-flags of the previous inlining pass (or
+%% initialization pass) are used as the "source-referenced"-flags for
+%% the subsequent pass. The latter may then be used as a safe
+%% approximation whenever we need to base a decision on whether or not a
+%% particular variable or function variable could be referenced in the
+%% program being generated, and computation of the new
+%% "residual-referenced" flag for that variable is not yet finished. In
+%% the present algorithm, this can only happen in the presence of
+%% variable assignments, which do not exist in Erlang. Therefore, we do
+%% not keep "source-referenced" flags for residual-code references in
+%% our implementation.
+%%
+%% The "inner-pending" flag tells us whether we are already in the
+%% process of visiting a particular operand, and the "outer-pending"
+%% flag whether we are in the process of inlining a propagated
+%% functional value. The "pending flags" are really counters limiting
+%% the number of times an operand may be inlined recursively, causing
+%% loop unrolling; however, unrolling more than one iteration does not
+%% work offhand in the present implementation. (TODO: find out why.)
+%% Note that the initial value must be greater than zero in order for
+%% any inlining at all to be done.
+
+%% Flags are stored in ETS-tables, one table for each class. The second
+%% element in each stored tuple is the key (the "label").
+
+-record(var_flags, {lab, referenced = false}).
+-record(opnd_flags, {lab, inner_pending = 1, outer_pending = 1,
+ effect = false}).
+-record(app_flags, {lab, inlined = false}).
+
+st__new(Effort, Size) ->
+ #state{free = 0,
+ size = counter__new_passive(Size),
+ effort = counter__new_passive(Effort),
+ cache = dict:new(),
+ var_flags = ets:new(var, [set, private, {keypos, 2}]),
+ opnd_flags = ets:new(opnd, [set, private, {keypos, 2}]),
+ app_flags = ets:new(app, [set, private, {keypos, 2}])}.
+
+st__new_loc(S) ->
+ N = S#state.free,
+ {N, S#state{free = N + 1}}.
+
+st__get_effort(S) ->
+ S#state.effort.
+
+st__set_effort(C, S) ->
+ S#state{effort = C}.
+
+st__get_size(S) ->
+ S#state.size.
+
+st__set_size(C, S) ->
+ S#state{size = C}.
+
+st__set_var_referenced(L, S) ->
+ T = S#state.var_flags,
+ [F] = ets:lookup(T, L),
+ ets:insert(T, F#var_flags{referenced = true}),
+ S.
+
+st__get_var_referenced(L, S) ->
+ ets:lookup_element(S#state.var_flags, L, #var_flags.referenced).
+
+st__lookup_opnd_cache(L, S) ->
+ dict:find(L, S#state.cache).
+
+%% Note that setting the cache should only be done once.
+
+st__set_opnd_cache(L, C, S) ->
+ S#state{cache = dict:store(L, C, S#state.cache)}.
+
+st__set_opnd_effect(L, S) ->
+ T = S#state.opnd_flags,
+ [F] = ets:lookup(T, L),
+ ets:insert(T, F#opnd_flags{effect = true}),
+ S.
+
+st__get_opnd_effect(L, S) ->
+ ets:lookup_element(S#state.opnd_flags, L, #opnd_flags.effect).
+
+st__set_app_inlined(L, S) ->
+ T = S#state.app_flags,
+ [F] = ets:lookup(T, L),
+ ets:insert(T, F#app_flags{inlined = true}),
+ S.
+
+st__clear_app_inlined(L, S) ->
+ T = S#state.app_flags,
+ [F] = ets:lookup(T, L),
+ ets:insert(T, F#app_flags{inlined = false}),
+ S.
+
+st__get_app_inlined(L, S) ->
+ ets:lookup_element(S#state.app_flags, L, #app_flags.inlined).
+
+%% The pending-flags are initialized by `st__new_opnd_loc' below.
+
+st__test_inner_pending(L, S) ->
+ T = S#state.opnd_flags,
+ P = ets:lookup_element(T, L, #opnd_flags.inner_pending),
+ P =< 0.
+
+st__mark_inner_pending(L, S) ->
+ ets:update_counter(S#state.opnd_flags, L,
+ {#opnd_flags.inner_pending, -1}),
+ S.
+
+st__clear_inner_pending(L, S) ->
+ ets:update_counter(S#state.opnd_flags, L,
+ {#opnd_flags.inner_pending, 1}),
+ S.
+
+st__test_outer_pending(L, S) ->
+ T = S#state.opnd_flags,
+ P = ets:lookup_element(T, L, #opnd_flags.outer_pending),
+ P =< 0.
+
+st__mark_outer_pending(L, S) ->
+ ets:update_counter(S#state.opnd_flags, L,
+ {#opnd_flags.outer_pending, -1}),
+ S.
+
+st__clear_outer_pending(L, S) ->
+ ets:update_counter(S#state.opnd_flags, L,
+ {#opnd_flags.outer_pending, 1}),
+ S.
+
+st__new_app_loc(S) ->
+ V = {L, _S1} = st__new_loc(S),
+ ets:insert(S#state.app_flags, #app_flags{lab = L}),
+ V.
+
+st__new_ref_loc(S) ->
+ V = {L, _S1} = st__new_loc(S),
+ ets:insert(S#state.var_flags, #var_flags{lab = L}),
+ V.
+
+st__new_opnd_loc(S) ->
+ V = {L, _S1} = st__new_loc(S),
+ ets:insert(S#state.opnd_flags, #opnd_flags{lab = L}),
+ V.
+
+
+%% =====================================================================
+%% Abstract datatype: counter()
+%%
+%% `counter__add' throws `{counter_exceeded, Type, Data}' if the
+%% resulting counter value would exceed the limit for the counter in
+%% question (`Type' and `Data' are given by the user).
+
+-record(counter, {active, value, limit}).
+
+counter__new_passive(Limit) when Limit > 0 ->
+ {0, Limit}.
+
+counter__new_active(Limit) when Limit > 0 ->
+ {Limit, Limit}.
+
+%% Active counters have values > 0 internally; passive counters start at
+%% zero. The 'limit' field is only accessed by the 'counter__limit'
+%% function.
+
+counter__is_active({C, _}) ->
+ C > 0.
+
+counter__limit({_, L}) ->
+ L.
+
+counter__value({N, L}) ->
+ if N > 0 ->
+ L - N;
+ true ->
+ -N
+ end.
+
+counter__add(N, {V, L}, Type, Data) ->
+ N1 = V - N,
+ if V > 0, N1 =< 0 ->
+ case debug_counters() of
+ true ->
+ case Type of
+ effort ->
+ put(counter_effort_triggers,
+ get(counter_effort_triggers) + 1);
+ size ->
+ put(counter_size_triggers,
+ get(counter_size_triggers) + 1)
+ end;
+ _ ->
+ ok
+ end,
+ throw({counter_exceeded, Type, Data});
+ true ->
+ {N1, L}
+ end.
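+
+%% For example, with an active counter of limit 3:
+%%
+%%	C0 = counter__new_active(3),		% {3, 3}
+%%	C1 = counter__add(2, C0, size, Data),	% {1, 3}
+%%	2 = counter__value(C1),
+%%	counter__add(2, C1, size, Data)		% throws {counter_exceeded,
+%%						%	  size, Data}
+%%
+%% A passive counter never throws; it only accumulates the total, which
+%% `counter__value' reports.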
+
+
+%% =====================================================================
+%% Reporting
+
+% report_internal_error(S) ->
+% report_internal_error(S, []).
+
+report_internal_error(S, Vs) ->
+ report_error("internal error: " ++ S, Vs).
+
+report_error(D) ->
+ report_error(D, []).
+
+report_error({F, L, D}, Vs) ->
+ report({F, L, {error, D}}, Vs);
+report_error(D, Vs) ->
+ report({error, D}, Vs).
+
+report_warning(D) ->
+ report_warning(D, []).
+
+report_warning({F, L, D}, Vs) ->
+ report({F, L, {warning, D}}, Vs);
+report_warning(D, Vs) ->
+ report({warning, D}, Vs).
+
+report(D, Vs) ->
+ io:put_chars(format(D, Vs)).
+
+format({error, D}, Vs) ->
+ ["error: ", format(D, Vs)];
+format({warning, D}, Vs) ->
+ ["warning: ", format(D, Vs)];
+format({"", L, D}, Vs) when integer(L), L > 0 ->
+ [io_lib:fwrite("~w: ", [L]), format(D, Vs)];
+format({"", _L, D}, Vs) ->
+ format(D, Vs);
+format({F, L, D}, Vs) when integer(L), L > 0 ->
+ [io_lib:fwrite("~s:~w: ", [filename(F), L]), format(D, Vs)];
+format({F, _L, D}, Vs) ->
+ [io_lib:fwrite("~s: ", [filename(F)]), format(D, Vs)];
+format(S, Vs) when list(S) ->
+ [io_lib:fwrite(S, Vs), $\n].
+
+
+%% =====================================================================
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_trees.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_trees.erl
new file mode 100644
index 0000000000..50384a6ff8
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/cerl_trees.erl
@@ -0,0 +1,801 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Richard Carlsson.
+%% Copyright (C) 1999-2002 Richard Carlsson.
+%% Portions created by Ericsson are Copyright 2001, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: cerl_trees.erl,v 1.2 2010/06/07 06:32:39 kostis Exp $
+
+%% @doc Basic functions on Core Erlang abstract syntax trees.
+%%
+%% <p>Syntax trees are defined in the module <a
+%% href=""><code>cerl</code></a>.</p>
+%%
+%% @type cerl() = cerl:cerl()
+
+-module(cerl_trees).
+
+-export([depth/1, fold/3, free_variables/1, label/1, label/2, map/2,
+ mapfold/3, size/1, variables/1]).
+
+-import(cerl, [alias_pat/1, alias_var/1, ann_c_alias/3, ann_c_apply/3,
+ ann_c_binary/2, ann_c_bitstr/6, ann_c_call/4,
+ ann_c_case/3, ann_c_catch/2, ann_c_clause/4,
+ ann_c_cons_skel/3, ann_c_fun/3, ann_c_let/4,
+ ann_c_letrec/3, ann_c_module/5, ann_c_primop/3,
+ ann_c_receive/4, ann_c_seq/3, ann_c_try/6,
+ ann_c_tuple_skel/2, ann_c_values/2, apply_args/1,
+ apply_op/1, binary_segments/1, bitstr_val/1,
+ bitstr_size/1, bitstr_unit/1, bitstr_type/1,
+ bitstr_flags/1, call_args/1, call_module/1, call_name/1,
+ case_arg/1, case_clauses/1, catch_body/1, clause_body/1,
+ clause_guard/1, clause_pats/1, clause_vars/1, concrete/1,
+ cons_hd/1, cons_tl/1, fun_body/1, fun_vars/1, get_ann/1,
+ let_arg/1, let_body/1, let_vars/1, letrec_body/1,
+ letrec_defs/1, letrec_vars/1, module_attrs/1,
+ module_defs/1, module_exports/1, module_name/1,
+ module_vars/1, primop_args/1, primop_name/1,
+ receive_action/1, receive_clauses/1, receive_timeout/1,
+ seq_arg/1, seq_body/1, set_ann/2, subtrees/1, try_arg/1,
+ try_body/1, try_vars/1, try_evars/1, try_handler/1,
+ tuple_es/1, type/1, update_c_alias/3, update_c_apply/3,
+ update_c_binary/2, update_c_bitstr/6, update_c_call/4,
+ update_c_case/3, update_c_catch/2, update_c_clause/4,
+ update_c_cons/3, update_c_cons_skel/3, update_c_fun/3,
+ update_c_let/4, update_c_letrec/3, update_c_module/5,
+ update_c_primop/3, update_c_receive/4, update_c_seq/3,
+ update_c_try/6, update_c_tuple/2, update_c_tuple_skel/2,
+ update_c_values/2, values_es/1, var_name/1]).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec depth(Tree::cerl()) -> integer()
+%%
+%% @doc Returns the length of the longest path in the tree. A leaf
+%% node has depth zero, the tree representing "<code>{foo,
+%% bar}</code>" has depth one, etc.
+
+depth(T) ->
+ case subtrees(T) of
+ [] ->
+ 0;
+ Gs ->
+ 1 + lists:foldl(fun (G, A) -> erlang:max(depth_1(G), A) end, 0, Gs)
+ end.
+
+depth_1(Ts) ->
+ lists:foldl(fun (T, A) -> erlang:max(depth(T), A) end, 0, Ts).
+
+%% max(X, Y) when X > Y -> X;
+%% max(_, Y) -> Y.
+
+
+%% @spec size(Tree::cerl()) -> integer()
+%%
+%% @doc Returns the number of nodes in <code>Tree</code>.
+
+size(T) ->
+ fold(fun (_, S) -> S + 1 end, 0, T).
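+
+%% For example:
+%%
+%%	T = cerl:c_tuple_skel([cerl:c_var(x), cerl:c_var(y)]),
+%%	1 = cerl_trees:depth(T),
+%%	3 = cerl_trees:size(T).		% the tuple node plus two variables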
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec map(Function, Tree::cerl()) -> cerl()
+%%
+%% Function = (cerl()) -> cerl()
+%%
+%% @doc Maps a function onto the nodes of a tree. This replaces each
+%% node in the tree by the result of applying the given function on
+%% the original node, bottom-up.
+%%
+%% @see mapfold/3
+
+map(F, T) ->
+ F(map_1(F, T)).
+
+map_1(F, T) ->
+ case type(T) of
+ literal ->
+ case concrete(T) of
+ [_ | _] ->
+ update_c_cons(T, map(F, cons_hd(T)),
+ map(F, cons_tl(T)));
+ V when tuple_size(V) > 0 ->
+ update_c_tuple(T, map_list(F, tuple_es(T)));
+ _ ->
+ T
+ end;
+ var ->
+ T;
+ values ->
+ update_c_values(T, map_list(F, values_es(T)));
+ cons ->
+ update_c_cons_skel(T, map(F, cons_hd(T)),
+ map(F, cons_tl(T)));
+ tuple ->
+ update_c_tuple_skel(T, map_list(F, tuple_es(T)));
+ 'let' ->
+ update_c_let(T, map_list(F, let_vars(T)),
+ map(F, let_arg(T)),
+ map(F, let_body(T)));
+ seq ->
+ update_c_seq(T, map(F, seq_arg(T)),
+ map(F, seq_body(T)));
+ apply ->
+ update_c_apply(T, map(F, apply_op(T)),
+ map_list(F, apply_args(T)));
+ call ->
+ update_c_call(T, map(F, call_module(T)),
+ map(F, call_name(T)),
+ map_list(F, call_args(T)));
+ primop ->
+ update_c_primop(T, map(F, primop_name(T)),
+ map_list(F, primop_args(T)));
+ 'case' ->
+ update_c_case(T, map(F, case_arg(T)),
+ map_list(F, case_clauses(T)));
+ clause ->
+ update_c_clause(T, map_list(F, clause_pats(T)),
+ map(F, clause_guard(T)),
+ map(F, clause_body(T)));
+ alias ->
+ update_c_alias(T, map(F, alias_var(T)),
+ map(F, alias_pat(T)));
+ 'fun' ->
+ update_c_fun(T, map_list(F, fun_vars(T)),
+ map(F, fun_body(T)));
+ 'receive' ->
+ update_c_receive(T, map_list(F, receive_clauses(T)),
+ map(F, receive_timeout(T)),
+ map(F, receive_action(T)));
+ 'try' ->
+ update_c_try(T, map(F, try_arg(T)),
+ map_list(F, try_vars(T)),
+ map(F, try_body(T)),
+ map_list(F, try_evars(T)),
+ map(F, try_handler(T)));
+ 'catch' ->
+ update_c_catch(T, map(F, catch_body(T)));
+ binary ->
+ update_c_binary(T, map_list(F, binary_segments(T)));
+ bitstr ->
+ update_c_bitstr(T, map(F, bitstr_val(T)),
+ map(F, bitstr_size(T)),
+ map(F, bitstr_unit(T)),
+ map(F, bitstr_type(T)),
+ map(F, bitstr_flags(T)));
+ letrec ->
+ update_c_letrec(T, map_pairs(F, letrec_defs(T)),
+ map(F, letrec_body(T)));
+ module ->
+ update_c_module(T, map(F, module_name(T)),
+ map_list(F, module_exports(T)),
+ map_pairs(F, module_attrs(T)),
+ map_pairs(F, module_defs(T)))
+ end.
+
+map_list(F, [T | Ts]) ->
+ [map(F, T) | map_list(F, Ts)];
+map_list(_, []) ->
+ [].
+
+map_pairs(F, [{T1, T2} | Ps]) ->
+ [{map(F, T1), map(F, T2)} | map_pairs(F, Ps)];
+map_pairs(_, []) ->
+ [].
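+
+%% For example, one way to clear the annotations of every node in a
+%% tree would be something like
+%%
+%%	cerl_trees:map(fun (Node) -> cerl:set_ann(Node, []) end, Tree)
+%%
+%% since the function is applied to each rebuilt node bottom-up.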
+
+
+%% @spec fold(Function, Unit::term(), Tree::cerl()) -> term()
+%%
+%% Function = (cerl(), term()) -> term()
+%%
+%% @doc Does a fold operation over the nodes of the tree. The result
+%% is the value of <code>Function(X1, Function(X2, ... Function(Xn,
+%% Unit) ... ))</code>, where <code>X1, ..., Xn</code> are the nodes
+%% of <code>Tree</code> in a post-order traversal.
+%%
+%% @see mapfold/3
+
+fold(F, S, T) ->
+ F(T, fold_1(F, S, T)).
+
+fold_1(F, S, T) ->
+ case type(T) of
+ literal ->
+ case concrete(T) of
+ [_ | _] ->
+ fold(F, fold(F, S, cons_hd(T)), cons_tl(T));
+ V when tuple_size(V) > 0 ->
+ fold_list(F, S, tuple_es(T));
+ _ ->
+ S
+ end;
+ var ->
+ S;
+ values ->
+ fold_list(F, S, values_es(T));
+ cons ->
+ fold(F, fold(F, S, cons_hd(T)), cons_tl(T));
+ tuple ->
+ fold_list(F, S, tuple_es(T));
+ 'let' ->
+ fold(F, fold(F, fold_list(F, S, let_vars(T)),
+ let_arg(T)),
+ let_body(T));
+ seq ->
+ fold(F, fold(F, S, seq_arg(T)), seq_body(T));
+ apply ->
+ fold_list(F, fold(F, S, apply_op(T)), apply_args(T));
+ call ->
+ fold_list(F, fold(F, fold(F, S, call_module(T)),
+ call_name(T)),
+ call_args(T));
+ primop ->
+ fold_list(F, fold(F, S, primop_name(T)), primop_args(T));
+ 'case' ->
+ fold_list(F, fold(F, S, case_arg(T)), case_clauses(T));
+ clause ->
+ fold(F, fold(F, fold_list(F, S, clause_pats(T)),
+ clause_guard(T)),
+ clause_body(T));
+ alias ->
+ fold(F, fold(F, S, alias_var(T)), alias_pat(T));
+ 'fun' ->
+ fold(F, fold_list(F, S, fun_vars(T)), fun_body(T));
+ 'receive' ->
+ fold(F, fold(F, fold_list(F, S, receive_clauses(T)),
+ receive_timeout(T)),
+ receive_action(T));
+ 'try' ->
+ fold(F, fold_list(F, fold(F, fold_list(F, fold(F, S, try_arg(T)),
+ try_vars(T)),
+ try_body(T)),
+ try_evars(T)),
+ try_handler(T));
+ 'catch' ->
+ fold(F, S, catch_body(T));
+ binary ->
+ fold_list(F, S, binary_segments(T));
+ bitstr ->
+ fold(F,
+ fold(F,
+ fold(F,
+ fold(F,
+ fold(F, S, bitstr_val(T)),
+ bitstr_size(T)),
+ bitstr_unit(T)),
+ bitstr_type(T)),
+ bitstr_flags(T));
+ letrec ->
+ fold(F, fold_pairs(F, S, letrec_defs(T)), letrec_body(T));
+ module ->
+ fold_pairs(F,
+ fold_pairs(F,
+ fold_list(F,
+ fold(F, S, module_name(T)),
+ module_exports(T)),
+ module_attrs(T)),
+ module_defs(T))
+ end.
+
+fold_list(F, S, [T | Ts]) ->
+ fold_list(F, fold(F, S, T), Ts);
+fold_list(_, S, []) ->
+ S.
+
+fold_pairs(F, S, [{T1, T2} | Ps]) ->
+ fold_pairs(F, fold(F, fold(F, S, T1), T2), Ps);
+fold_pairs(_, S, []) ->
+ S.
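+
+%% For example, counting the variable nodes of a tree could be written
+%% as something like
+%%
+%%	cerl_trees:fold(fun (Node, N) ->
+%%				case cerl:type(Node) of
+%%				    var -> N + 1;
+%%				    _ -> N
+%%				end
+%%			end, 0, Tree)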
+
+
+%% @spec mapfold(Function, Initial::term(), Tree::cerl()) ->
+%% {cerl(), term()}
+%%
+%% Function = (cerl(), term()) -> {cerl(), term()}
+%%
+%% @doc Does a combined map/fold operation on the nodes of the
+%% tree. This is similar to <code>map/2</code>, but also propagates a
+%% value from each application of <code>Function</code> to the next,
+%% starting with the given value <code>Initial</code>, while doing a
+%% post-order traversal of the tree, much like <code>fold/3</code>.
+%%
+%% @see map/2
+%% @see fold/3
+
+mapfold(F, S0, T) ->
+ case type(T) of
+ literal ->
+ case concrete(T) of
+ [_ | _] ->
+ {T1, S1} = mapfold(F, S0, cons_hd(T)),
+ {T2, S2} = mapfold(F, S1, cons_tl(T)),
+ F(update_c_cons(T, T1, T2), S2);
+ V when tuple_size(V) > 0 ->
+ {Ts, S1} = mapfold_list(F, S0, tuple_es(T)),
+ F(update_c_tuple(T, Ts), S1);
+ _ ->
+ F(T, S0)
+ end;
+ var ->
+ F(T, S0);
+ values ->
+ {Ts, S1} = mapfold_list(F, S0, values_es(T)),
+ F(update_c_values(T, Ts), S1);
+ cons ->
+ {T1, S1} = mapfold(F, S0, cons_hd(T)),
+ {T2, S2} = mapfold(F, S1, cons_tl(T)),
+ F(update_c_cons_skel(T, T1, T2), S2);
+ tuple ->
+ {Ts, S1} = mapfold_list(F, S0, tuple_es(T)),
+ F(update_c_tuple_skel(T, Ts), S1);
+ 'let' ->
+ {Vs, S1} = mapfold_list(F, S0, let_vars(T)),
+ {A, S2} = mapfold(F, S1, let_arg(T)),
+ {B, S3} = mapfold(F, S2, let_body(T)),
+ F(update_c_let(T, Vs, A, B), S3);
+ seq ->
+ {A, S1} = mapfold(F, S0, seq_arg(T)),
+ {B, S2} = mapfold(F, S1, seq_body(T)),
+ F(update_c_seq(T, A, B), S2);
+ apply ->
+ {E, S1} = mapfold(F, S0, apply_op(T)),
+ {As, S2} = mapfold_list(F, S1, apply_args(T)),
+ F(update_c_apply(T, E, As), S2);
+ call ->
+ {M, S1} = mapfold(F, S0, call_module(T)),
+ {N, S2} = mapfold(F, S1, call_name(T)),
+ {As, S3} = mapfold_list(F, S2, call_args(T)),
+ F(update_c_call(T, M, N, As), S3);
+ primop ->
+ {N, S1} = mapfold(F, S0, primop_name(T)),
+ {As, S2} = mapfold_list(F, S1, primop_args(T)),
+ F(update_c_primop(T, N, As), S2);
+ 'case' ->
+ {A, S1} = mapfold(F, S0, case_arg(T)),
+ {Cs, S2} = mapfold_list(F, S1, case_clauses(T)),
+ F(update_c_case(T, A, Cs), S2);
+ clause ->
+ {Ps, S1} = mapfold_list(F, S0, clause_pats(T)),
+ {G, S2} = mapfold(F, S1, clause_guard(T)),
+ {B, S3} = mapfold(F, S2, clause_body(T)),
+ F(update_c_clause(T, Ps, G, B), S3);
+ alias ->
+ {V, S1} = mapfold(F, S0, alias_var(T)),
+ {P, S2} = mapfold(F, S1, alias_pat(T)),
+ F(update_c_alias(T, V, P), S2);
+ 'fun' ->
+ {Vs, S1} = mapfold_list(F, S0, fun_vars(T)),
+ {B, S2} = mapfold(F, S1, fun_body(T)),
+ F(update_c_fun(T, Vs, B), S2);
+ 'receive' ->
+ {Cs, S1} = mapfold_list(F, S0, receive_clauses(T)),
+ {E, S2} = mapfold(F, S1, receive_timeout(T)),
+ {A, S3} = mapfold(F, S2, receive_action(T)),
+ F(update_c_receive(T, Cs, E, A), S3);
+ 'try' ->
+ {E, S1} = mapfold(F, S0, try_arg(T)),
+ {Vs, S2} = mapfold_list(F, S1, try_vars(T)),
+ {B, S3} = mapfold(F, S2, try_body(T)),
+ {Evs, S4} = mapfold_list(F, S3, try_evars(T)),
+ {H, S5} = mapfold(F, S4, try_handler(T)),
+ F(update_c_try(T, E, Vs, B, Evs, H), S5);
+ 'catch' ->
+ {B, S1} = mapfold(F, S0, catch_body(T)),
+ F(update_c_catch(T, B), S1);
+ binary ->
+ {Ds, S1} = mapfold_list(F, S0, binary_segments(T)),
+ F(update_c_binary(T, Ds), S1);
+ bitstr ->
+ {Val, S1} = mapfold(F, S0, bitstr_val(T)),
+ {Size, S2} = mapfold(F, S1, bitstr_size(T)),
+ {Unit, S3} = mapfold(F, S2, bitstr_unit(T)),
+ {Type, S4} = mapfold(F, S3, bitstr_type(T)),
+ {Flags, S5} = mapfold(F, S4, bitstr_flags(T)),
+ F(update_c_bitstr(T, Val, Size, Unit, Type, Flags), S5);
+ letrec ->
+ {Ds, S1} = mapfold_pairs(F, S0, letrec_defs(T)),
+ {B, S2} = mapfold(F, S1, letrec_body(T)),
+ F(update_c_letrec(T, Ds, B), S2);
+ module ->
+ {N, S1} = mapfold(F, S0, module_name(T)),
+ {Es, S2} = mapfold_list(F, S1, module_exports(T)),
+ {As, S3} = mapfold_pairs(F, S2, module_attrs(T)),
+ {Ds, S4} = mapfold_pairs(F, S3, module_defs(T)),
+ F(update_c_module(T, N, Es, As, Ds), S4)
+ end.
+
+mapfold_list(F, S0, [T | Ts]) ->
+ {T1, S1} = mapfold(F, S0, T),
+ {Ts1, S2} = mapfold_list(F, S1, Ts),
+ {[T1 | Ts1], S2};
+mapfold_list(_, S, []) ->
+ {[], S}.
+
+mapfold_pairs(F, S0, [{T1, T2} | Ps]) ->
+ {T3, S1} = mapfold(F, S0, T1),
+ {T4, S2} = mapfold(F, S1, T2),
+ {Ps1, S3} = mapfold_pairs(F, S2, Ps),
+ {[{T3, T4} | Ps1], S3};
+mapfold_pairs(_, S, []) ->
+ {[], S}.
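+
+%% For example, a single pass that clears the annotations of every node
+%% while counting the nodes visited might look like
+%%
+%%	cerl_trees:mapfold(fun (Node, N) ->
+%%				   {cerl:set_ann(Node, []), N + 1}
+%%			   end, 0, Tree)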
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec variables(Tree::cerl()) -> [var_name()]
+%%
+%% var_name() = integer() | atom() | {atom(), integer()}
+%%
+%% @doc Returns an ordered-set list of the names of all variables in
+%% the syntax tree. (This includes function name variables.) An
+%% exception is thrown if <code>Tree</code> does not represent a
+%% well-formed Core Erlang syntax tree.
+%%
+%% @see free_variables/1
+
+variables(T) ->
+ variables(T, false).
+
+
+%% @spec free_variables(Tree::cerl()) -> [var_name()]
+%%
+%% @doc Like <code>variables/1</code>, but only includes variables
+%% that are free in the tree.
+%%
+%% @see variables/1
+
+free_variables(T) ->
+ variables(T, true).
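+
+%% For example, for a tree representing `let <X> = Y in {X, Z}',
+%% `variables/1' returns the names of X, Y and Z (as an ordered set),
+%% while `free_variables/1' returns only the names of Y and Z.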
+
+
+%% This is not exported
+
+variables(T, S) ->
+ case type(T) of
+ literal ->
+ [];
+ var ->
+ [var_name(T)];
+ values ->
+ vars_in_list(values_es(T), S);
+ cons ->
+ ordsets:union(variables(cons_hd(T), S),
+ variables(cons_tl(T), S));
+ tuple ->
+ vars_in_list(tuple_es(T), S);
+ 'let' ->
+ Vs = variables(let_body(T), S),
+ Vs1 = var_list_names(let_vars(T)),
+ Vs2 = case S of
+ true ->
+ ordsets:subtract(Vs, Vs1);
+ false ->
+ ordsets:union(Vs, Vs1)
+ end,
+ ordsets:union(variables(let_arg(T), S), Vs2);
+ seq ->
+ ordsets:union(variables(seq_arg(T), S),
+ variables(seq_body(T), S));
+ apply ->
+ ordsets:union(
+ variables(apply_op(T), S),
+ vars_in_list(apply_args(T), S));
+ call ->
+ ordsets:union(variables(call_module(T), S),
+ ordsets:union(
+ variables(call_name(T), S),
+ vars_in_list(call_args(T), S)));
+ primop ->
+ vars_in_list(primop_args(T), S);
+ 'case' ->
+ ordsets:union(variables(case_arg(T), S),
+ vars_in_list(case_clauses(T), S));
+ clause ->
+ Vs = ordsets:union(variables(clause_guard(T), S),
+ variables(clause_body(T), S)),
+ Vs1 = vars_in_list(clause_pats(T), S),
+ case S of
+ true ->
+ ordsets:subtract(Vs, Vs1);
+ false ->
+ ordsets:union(Vs, Vs1)
+ end;
+ alias ->
+ ordsets:add_element(var_name(alias_var(T)),
+ variables(alias_pat(T)));
+ 'fun' ->
+ Vs = variables(fun_body(T), S),
+ Vs1 = var_list_names(fun_vars(T)),
+ case S of
+ true ->
+ ordsets:subtract(Vs, Vs1);
+ false ->
+ ordsets:union(Vs, Vs1)
+ end;
+ 'receive' ->
+ ordsets:union(
+ vars_in_list(receive_clauses(T), S),
+ ordsets:union(variables(receive_timeout(T), S),
+ variables(receive_action(T), S)));
+ 'try' ->
+ Vs = variables(try_body(T), S),
+ Vs1 = var_list_names(try_vars(T)),
+ Vs2 = case S of
+ true ->
+ ordsets:subtract(Vs, Vs1);
+ false ->
+ ordsets:union(Vs, Vs1)
+ end,
+ Vs3 = variables(try_handler(T), S),
+ Vs4 = var_list_names(try_evars(T)),
+ Vs5 = case S of
+ true ->
+ ordsets:subtract(Vs3, Vs4);
+ false ->
+ ordsets:union(Vs3, Vs4)
+ end,
+ ordsets:union(variables(try_arg(T), S),
+ ordsets:union(Vs2, Vs5));
+ 'catch' ->
+ variables(catch_body(T), S);
+ binary ->
+ vars_in_list(binary_segments(T), S);
+ bitstr ->
+ ordsets:union(variables(bitstr_val(T), S),
+ variables(bitstr_size(T), S));
+ letrec ->
+ Vs = vars_in_defs(letrec_defs(T), S),
+ Vs1 = ordsets:union(variables(letrec_body(T), S), Vs),
+ Vs2 = var_list_names(letrec_vars(T)),
+ case S of
+ true ->
+ ordsets:subtract(Vs1, Vs2);
+ false ->
+ ordsets:union(Vs1, Vs2)
+ end;
+ module ->
+ Vs = vars_in_defs(module_defs(T), S),
+ Vs1 = ordsets:union(vars_in_list(module_exports(T), S), Vs),
+ Vs2 = var_list_names(module_vars(T)),
+ case S of
+ true ->
+ ordsets:subtract(Vs1, Vs2);
+ false ->
+ ordsets:union(Vs1, Vs2)
+ end
+ end.
+
+vars_in_list(Ts, S) ->
+ vars_in_list(Ts, S, []).
+
+vars_in_list([T | Ts], S, A) ->
+ vars_in_list(Ts, S, ordsets:union(variables(T, S), A));
+vars_in_list([], _, A) ->
+ A.
+
+%% Note that this function only visits the right-hand side of function
+%% definitions.
+
+vars_in_defs(Ds, S) ->
+ vars_in_defs(Ds, S, []).
+
+vars_in_defs([{_, F} | Ds], S, A) ->
+ vars_in_defs(Ds, S, ordsets:union(variables(F, S), A));
+vars_in_defs([], _, A) ->
+ A.
+
+%% This amounts to insertion sort. Since the lists are generally short,
+%% it is hardly worthwhile to use an asymptotically better sort.
+
+var_list_names(Vs) ->
+ var_list_names(Vs, []).
+
+var_list_names([V | Vs], A) ->
+ var_list_names(Vs, ordsets:add_element(var_name(V), A));
+var_list_names([], A) ->
+ A.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec label(Tree::cerl()) -> {cerl(), integer()}
+%%
+%% @equiv label(Tree, 0)
+
+label(T) ->
+ label(T, 0).
+
+%% @spec label(Tree::cerl(), N::integer()) -> {cerl(), integer()}
+%%
+%% @doc Labels each expression in the tree. A term <code>{label,
+%% L}</code> is prefixed to the annotation list of each expression node,
+%% where L is a unique number for every node, except for variables (and
+%% function name variables) which get the same label if they represent
+%% the same variable. Constant literal nodes are not labeled.
+%%
+%% <p>The returned value is a tuple <code>{NewTree, Max}</code>, where
+%% <code>NewTree</code> is the labeled tree and <code>Max</code> is 1
+%% plus the largest label value used. All previous annotation terms of
+%% the form <code>{label, X}</code> are deleted.</p>
+%%
+%% <p>The values of L used in the tree form a dense range from
+%% <code>N</code> to <code>Max - 1</code>, where <code>N =&lt; Max
+%% =&lt; N + size(Tree)</code>. Note that it is possible that no
+%% labels are used at all, i.e., <code>N = Max</code>.</p>
+%%
+%% <p>Note: All instances of free variables will be given distinct
+%% labels.</p>
+%%
+%% @see label/1
+%% @see size/1
+
+label(T, N) ->
+ label(T, N, dict:new()).
+
+label(T, N, Env) ->
+ case type(T) of
+ literal ->
+ %% Constant literals are not labeled.
+ {T, N};
+ var ->
+ case dict:find(var_name(T), Env) of
+ {ok, L} ->
+ {As, _} = label_ann(T, L),
+ N1 = N;
+ error ->
+ {As, N1} = label_ann(T, N)
+ end,
+ {set_ann(T, As), N1};
+ values ->
+ {Ts, N1} = label_list(values_es(T), N, Env),
+ {As, N2} = label_ann(T, N1),
+ {ann_c_values(As, Ts), N2};
+ cons ->
+ {T1, N1} = label(cons_hd(T), N, Env),
+ {T2, N2} = label(cons_tl(T), N1, Env),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_cons_skel(As, T1, T2), N3};
+ tuple ->
+ {Ts, N1} = label_list(tuple_es(T), N, Env),
+ {As, N2} = label_ann(T, N1),
+ {ann_c_tuple_skel(As, Ts), N2};
+ 'let' ->
+ {A, N1} = label(let_arg(T), N, Env),
+ {Vs, N2, Env1} = label_vars(let_vars(T), N1, Env),
+ {B, N3} = label(let_body(T), N2, Env1),
+ {As, N4} = label_ann(T, N3),
+ {ann_c_let(As, Vs, A, B), N4};
+ seq ->
+ {A, N1} = label(seq_arg(T), N, Env),
+ {B, N2} = label(seq_body(T), N1, Env),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_seq(As, A, B), N3};
+ apply ->
+ {E, N1} = label(apply_op(T), N, Env),
+ {Es, N2} = label_list(apply_args(T), N1, Env),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_apply(As, E, Es), N3};
+ call ->
+ {M, N1} = label(call_module(T), N, Env),
+ {F, N2} = label(call_name(T), N1, Env),
+ {Es, N3} = label_list(call_args(T), N2, Env),
+ {As, N4} = label_ann(T, N3),
+ {ann_c_call(As, M, F, Es), N4};
+ primop ->
+ {F, N1} = label(primop_name(T), N, Env),
+ {Es, N2} = label_list(primop_args(T), N1, Env),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_primop(As, F, Es), N3};
+ 'case' ->
+ {A, N1} = label(case_arg(T), N, Env),
+ {Cs, N2} = label_list(case_clauses(T), N1, Env),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_case(As, A, Cs), N3};
+ clause ->
+ {_, N1, Env1} = label_vars(clause_vars(T), N, Env),
+ {Ps, N2} = label_list(clause_pats(T), N1, Env1),
+ {G, N3} = label(clause_guard(T), N2, Env1),
+ {B, N4} = label(clause_body(T), N3, Env1),
+ {As, N5} = label_ann(T, N4),
+ {ann_c_clause(As, Ps, G, B), N5};
+ alias ->
+ {V, N1} = label(alias_var(T), N, Env),
+ {P, N2} = label(alias_pat(T), N1, Env),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_alias(As, V, P), N3};
+ 'fun' ->
+ {Vs, N1, Env1} = label_vars(fun_vars(T), N, Env),
+ {B, N2} = label(fun_body(T), N1, Env1),
+ {As, N3} = label_ann(T, N2),
+ {ann_c_fun(As, Vs, B), N3};
+ 'receive' ->
+ {Cs, N1} = label_list(receive_clauses(T), N, Env),
+ {E, N2} = label(receive_timeout(T), N1, Env),
+ {A, N3} = label(receive_action(T), N2, Env),
+ {As, N4} = label_ann(T, N3),
+ {ann_c_receive(As, Cs, E, A), N4};
+ 'try' ->
+ {E, N1} = label(try_arg(T), N, Env),
+ {Vs, N2, Env1} = label_vars(try_vars(T), N1, Env),
+ {B, N3} = label(try_body(T), N2, Env1),
+ {Evs, N4, Env2} = label_vars(try_evars(T), N3, Env),
+ {H, N5} = label(try_handler(T), N4, Env2),
+ {As, N6} = label_ann(T, N5),
+ {ann_c_try(As, E, Vs, B, Evs, H), N6};
+ 'catch' ->
+ {B, N1} = label(catch_body(T), N, Env),
+ {As, N2} = label_ann(T, N1),
+ {ann_c_catch(As, B), N2};
+ binary ->
+ {Ds, N1} = label_list(binary_segments(T), N, Env),
+ {As, N2} = label_ann(T, N1),
+ {ann_c_binary(As, Ds), N2};
+ bitstr ->
+ {Val, N1} = label(bitstr_val(T), N, Env),
+ {Size, N2} = label(bitstr_size(T), N1, Env),
+ {Unit, N3} = label(bitstr_unit(T), N2, Env),
+ {Type, N4} = label(bitstr_type(T), N3, Env),
+ {Flags, N5} = label(bitstr_flags(T), N4, Env),
+ {As, N6} = label_ann(T, N5),
+ {ann_c_bitstr(As, Val, Size, Unit, Type, Flags), N6};
+ letrec ->
+ {_, N1, Env1} = label_vars(letrec_vars(T), N, Env),
+ {Ds, N2} = label_defs(letrec_defs(T), N1, Env1),
+ {B, N3} = label(letrec_body(T), N2, Env1),
+ {As, N4} = label_ann(T, N3),
+ {ann_c_letrec(As, Ds, B), N4};
+ module ->
+ %% The module name is not labeled.
+ {_, N1, Env1} = label_vars(module_vars(T), N, Env),
+ {Ts, N2} = label_defs(module_attrs(T), N1, Env1),
+ {Ds, N3} = label_defs(module_defs(T), N2, Env1),
+ {Es, N4} = label_list(module_exports(T), N3, Env1),
+ {As, N5} = label_ann(T, N4),
+ {ann_c_module(As, module_name(T), Es, Ts, Ds), N5}
+ end.
+
+label_list([T | Ts], N, Env) ->
+ {T1, N1} = label(T, N, Env),
+ {Ts1, N2} = label_list(Ts, N1, Env),
+ {[T1 | Ts1], N2};
+label_list([], N, _Env) ->
+ {[], N}.
+
+label_vars([T | Ts], N, Env) ->
+ Env1 = dict:store(var_name(T), N, Env),
+ {As, N1} = label_ann(T, N),
+ T1 = set_ann(T, As),
+ {Ts1, N2, Env2} = label_vars(Ts, N1, Env1),
+ {[T1 | Ts1], N2, Env2};
+label_vars([], N, Env) ->
+ {[], N, Env}.
+
+label_defs([{F, T} | Ds], N, Env) ->
+ {F1, N1} = label(F, N, Env),
+ {T1, N2} = label(T, N1, Env),
+ {Ds1, N3} = label_defs(Ds, N2, Env),
+ {[{F1, T1} | Ds1], N3};
+label_defs([], N, _Env) ->
+ {[], N}.
+
+label_ann(T, N) ->
+ {[{label, N} | filter_labels(get_ann(T))], N + 1}.
+
+filter_labels([{label, _} | As]) ->
+ filter_labels(As);
+filter_labels([A | As]) ->
+ [A | filter_labels(As)];
+filter_labels([]) ->
+ [].
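
A minimal sketch of the labelling scheme, again assuming the usual module name cerl: in let <X> = 'a' in X the literal is left unlabelled, both occurrences of the bound variable X share one label, and the let node gets the next one.

    T = cerl:c_let([cerl:c_var('X')], cerl:c_atom(a), cerl:c_var('X')),
    {T1, Next} = cerl:label(T),
    2 = Next,                        %% labels 0 and 1 were used; both X occurrences share label 0
    [{label,1}] = cerl:get_ann(T1).  %% the let node itself carries label 1
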
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/compile.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/compile.erl
new file mode 100644
index 0000000000..4542bf9eb9
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/compile.erl
@@ -0,0 +1,1109 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: compile.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose: Run the Erlang compiler.
+
+-module(compile).
+-include("erl_compile.hrl").
+-include("core_parse.hrl").
+
+%% High-level interface.
+-export([file/1,file/2,format_error/1,iofile/1]).
+-export([forms/1,forms/2]).
+-export([output_generated/1]).
+-export([options/0]).
+
+%% Erlc interface.
+-export([compile/3,compile_beam/3,compile_asm/3,compile_core/3]).
+
+
+-import(lists, [member/2,reverse/1,keysearch/3,last/1,
+ map/2,flatmap/2,foreach/2,foldr/3,any/2,filter/2]).
+
+%% file(FileName)
+%% file(FileName, Options)
+%% Compile the module in file FileName.
+
+-define(DEFAULT_OPTIONS, [verbose,report_errors,report_warnings]).
+
+-define(pass(P), {P,fun P/1}).
+
+file(File) -> file(File, ?DEFAULT_OPTIONS).
+
+file(File, Opts) when list(Opts) ->
+ do_compile({file,File}, Opts++env_default_opts());
+file(File, Opt) ->
+ file(File, [Opt|?DEFAULT_OPTIONS]).
+
+forms(File) -> forms(File, ?DEFAULT_OPTIONS).
+
+forms(Forms, Opts) when list(Opts) ->
+ do_compile({forms,Forms}, [binary|Opts++env_default_opts()]);
+forms(Forms, Opts) when atom(Opts) ->
+ forms(Forms, [Opts|?DEFAULT_OPTIONS]).
+
+env_default_opts() ->
+ Key = "ERL_COMPILER_OPTIONS",
+ case os:getenv(Key) of
+ false -> [];
+ Str when list(Str) ->
+ case erl_scan:string(Str) of
+ {ok,Tokens,_} ->
+ case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of
+ {ok,List} when list(List) -> List;
+ {ok,Term} -> [Term];
+ {error,_Reason} ->
+ io:format("Ignoring bad term in ~s\n", [Key]),
+ []
+ end;
+ {error, {_,_,_Reason}, _} ->
+ io:format("Ignoring bad term in ~s\n", [Key]),
+ []
+ end
+ end.
+
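
A minimal sketch of what env_default_opts/0 produces (it is a local function; the environment value shown is only an assumed example):

    os:putenv("ERL_COMPILER_OPTIONS", "[debug_info, {d,'DEBUG'}]"),
    [debug_info, {d,'DEBUG'}] = env_default_opts().
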
+do_compile(Input, Opts0) ->
+ Opts = expand_opts(Opts0),
+ Self = self(),
+ Serv = spawn_link(fun() -> internal(Self, Input, Opts) end),
+ receive
+ {Serv,Rep} -> Rep
+ end.
+
+%% Given a list of compilation options, returns true if compile:file/2
+%% would have generated a Beam file, false otherwise (if only a binary or a
+%% listing file would have been generated).
+
+output_generated(Opts) ->
+ any(fun ({save_binary,_F}) -> true;
+ (_Other) -> false
+ end, passes(file, expand_opts(Opts))).
+
+expand_opts(Opts) ->
+ foldr(fun expand_opt/2, [], Opts).
+
+expand_opt(basic_validation, Os) ->
+ [no_code_generation,to_pp,binary|Os];
+expand_opt(strong_validation, Os) ->
+ [no_code_generation,to_kernel,binary|Os];
+expand_opt(report, Os) ->
+ [report_errors,report_warnings|Os];
+expand_opt(return, Os) ->
+ [return_errors,return_warnings|Os];
+expand_opt(r7, Os) ->
+ [no_float_opt,no_new_funs,no_new_binaries,no_new_apply|Os];
+expand_opt(O, Os) -> [O|Os].
+
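
As a small worked example of the expansion (expand_opts/1 is local; foldr/3 preserves the relative order of the expanded options):

    [report_errors, report_warnings,
     no_float_opt, no_new_funs, no_new_binaries, no_new_apply] =
        expand_opts([report, r7]).
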
+filter_opts(Opts0) ->
+ %% Native code generation is not supported if no_new_funs is given.
+ case member(no_new_funs, Opts0) of
+ false -> Opts0;
+ true -> Opts0 -- [native]
+ end.
+
+%% format_error(ErrorDescriptor) -> string()
+
+format_error(no_native_support) ->
+ "this system is not configured for native-code compilation.";
+format_error({native, E}) ->
+ io_lib:fwrite("native-code compilation failed with reason: ~P.",
+ [E, 25]);
+format_error({native_crash, E}) ->
+ io_lib:fwrite("native-code compilation crashed with reason: ~P.",
+ [E, 25]);
+format_error({open,E}) ->
+ io_lib:format("open error '~s'", [file:format_error(E)]);
+format_error({epp,E}) ->
+ epp:format_error(E);
+format_error(write_error) ->
+ "error writing file";
+format_error({rename,S}) ->
+ io_lib:format("error renaming ~s", [S]);
+format_error({parse_transform,M,R}) ->
+ io_lib:format("error in parse transform '~s': ~p", [M, R]);
+format_error({core_transform,M,R}) ->
+ io_lib:format("error in core transform '~s': ~p", [M, R]);
+format_error({crash,Pass,Reason}) ->
+ io_lib:format("internal error in ~p;\ncrash reason: ~p", [Pass,Reason]);
+format_error({bad_return,Pass,Reason}) ->
+ io_lib:format("internal error in ~p;\nbad return value: ~p", [Pass,Reason]).
+
+%% The compile state record.
+-record(compile, {filename="",
+ dir="",
+ base="",
+ ifile="",
+ ofile="",
+ module=[],
+ code=[],
+ core_code=[],
+ abstract_code=[], %Abstract code for debugger.
+ options=[],
+ errors=[],
+ warnings=[]}).
+
+internal(Master, Input, Opts) ->
+ Master ! {self(),
+ case catch internal(Input, Opts) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Other ->
+ Other
+ end}.
+
+internal({forms,Forms}, Opts) ->
+ Ps = passes(forms, Opts),
+ internal_comp(Ps, "", "", #compile{code=Forms,options=Opts});
+internal({file,File}, Opts) ->
+ Ps = passes(file, Opts),
+ Compile = #compile{options=Opts},
+ case member(from_core, Opts) of
+ true -> internal_comp(Ps, File, ".core", Compile);
+ false ->
+ case member(from_beam, Opts) of
+ true ->
+ internal_comp(Ps, File, ".beam", Compile);
+ false ->
+ case member(from_asm, Opts) orelse member(asm, Opts) of
+ true ->
+ internal_comp(Ps, File, ".S", Compile);
+ false ->
+ internal_comp(Ps, File, ".erl", Compile)
+ end
+ end
+ end.
+
+internal_comp(Passes, File, Suffix, St0) ->
+ Dir = filename:dirname(File),
+ Base = filename:basename(File, Suffix),
+ St1 = St0#compile{filename=File, dir=Dir, base=Base,
+ ifile=erlfile(Dir, Base, Suffix),
+ ofile=objfile(Base, St0)},
+ Run = case member(time, St1#compile.options) of
+ true ->
+ io:format("Compiling ~p\n", [File]),
+ fun run_tc/2;
+ false -> fun({_Name,Fun}, St) -> catch Fun(St) end
+ end,
+ case fold_comp(Passes, Run, St1) of
+ {ok,St2} -> comp_ret_ok(St2);
+ {error,St2} -> comp_ret_err(St2)
+ end.
+
+fold_comp([{Name,Test,Pass}|Ps], Run, St) ->
+ case Test(St) of
+ false -> %Pass is not needed.
+ fold_comp(Ps, Run, St);
+ true -> %Run pass in the usual way.
+ fold_comp([{Name,Pass}|Ps], Run, St)
+ end;
+fold_comp([{Name,Pass}|Ps], Run, St0) ->
+ case Run({Name,Pass}, St0) of
+ {ok,St1} -> fold_comp(Ps, Run, St1);
+ {error,St1} -> {error,St1};
+ {'EXIT',Reason} ->
+ Es = [{St0#compile.ifile,[{none,?MODULE,{crash,Name,Reason}}]}],
+ {error,St0#compile{errors=St0#compile.errors ++ Es}};
+ Other ->
+ Es = [{St0#compile.ifile,[{none,?MODULE,{bad_return,Name,Other}}]}],
+ {error,St0#compile{errors=St0#compile.errors ++ Es}}
+ end;
+fold_comp([], _Run, St) -> {ok,St}.
+
+os_process_size() ->
+ case os:type() of
+ {unix, sunos} ->
+ Size = os:cmd("ps -o vsz -p " ++ os:getpid() ++ " | tail -1"),
+ list_to_integer(lib:nonl(Size));
+ _ ->
+ 0
+ end.
+
+run_tc({Name,Fun}, St) ->
+ Before0 = statistics(runtime),
+ Val = (catch Fun(St)),
+ After0 = statistics(runtime),
+ {Before_c, _} = Before0,
+ {After_c, _} = After0,
+ io:format(" ~-30s: ~10.3f s (~w k)\n",
+ [Name, (After_c-Before_c) / 1000, os_process_size()]),
+ Val.
+
+comp_ret_ok(#compile{code=Code,warnings=Warn,module=Mod,options=Opts}=St) ->
+ report_warnings(St),
+ Ret1 = case member(binary, Opts) andalso not member(no_code_generation, Opts) of
+ true -> [Code];
+ false -> []
+ end,
+ Ret2 = case member(return_warnings, Opts) of
+ true -> Ret1 ++ [Warn];
+ false -> Ret1
+ end,
+ list_to_tuple([ok,Mod|Ret2]).
+
+comp_ret_err(St) ->
+ report_errors(St),
+ report_warnings(St),
+ case member(return_errors, St#compile.options) of
+ true -> {error,St#compile.errors,St#compile.warnings};
+ false -> error
+ end.
+
+%% passes(forms|file, [Option]) -> [{Name,PassFun}]
+%%  Figure out which passes need to be run.
+
+passes(forms, Opts) ->
+ select_passes(standard_passes(), Opts);
+passes(file, Opts) ->
+ case member(from_beam, Opts) of
+ true ->
+ Ps = [?pass(read_beam_file)|binary_passes()],
+ select_passes(Ps, Opts);
+ false ->
+ Ps = case member(from_asm, Opts) orelse member(asm, Opts) of
+ true ->
+ [?pass(beam_consult_asm)|asm_passes()];
+ false ->
+ case member(from_core, Opts) of
+ true ->
+ [?pass(parse_core)|core_passes()];
+ false ->
+ [?pass(parse_module)|standard_passes()]
+ end
+ end,
+ Fs = select_passes(Ps, Opts),
+
+ %% If the last pass saves the resulting binary to a file,
+ %% insert a first pass to remove the file.
+ case last(Fs) of
+ {save_binary,_Fun} -> [?pass(remove_file)|Fs];
+ _Other -> Fs
+ end
+ end.
+
+%% select_passes([Command], Opts) -> [{Name,Function}]
+%% Interpret the lists of commands to return a pure list of passes.
+%%
+%% Command can be one of:
+%%
+%% {pass,Mod} Will be expanded to a call to the external
+%% function Mod:module(Code, Options). This
+%% function must transform the code and return
+%% {ok,NewCode} or {error,Term}.
+%% Example: {pass,beam_codegen}
+%%
+%% {Name,Fun} Name is an atom giving the name of the pass.
+%%		Fun is a 'fun' taking one argument: a compile record.
+%% The fun should return {ok,NewCompileRecord} or
+%% {error,NewCompileRecord}.
+%%		Note: ?pass(Name) is equivalent to {Name,fun Name/1}.
+%% Example: ?pass(parse_module)
+%%
+%% {Name,Test,Fun} Like {Name,Fun} above, but the pass will be run
+%% (and listed by the `time' option) only if Test(St)
+%% returns true.
+%%
+%% {src_listing,Ext}	Produces an Erlang source listing with the
+%%			file extension Ext.  (Ext should not contain
+%% a period.) No more passes will be run.
+%%
+%% {listing,Ext}	Produce a listing of the terms in the internal
+%% representation. The extension of the listing
+%% file will be Ext. (Ext should not contain
+%% a period.) No more passes will be run.
+%%
+%% {done,Ext} End compilation at this point. Produce a listing
+%% as with {listing,Ext}, unless 'binary' is
+%% specified, in which case the current
+%% representation of the code is returned without
+%% creating an output file.
+%%
+%% {iff,Flag,Cmd} If the given Flag is given in the option list,
+%% Cmd will be interpreted as a command.
+%% Otherwise, Cmd will be ignored.
+%%			Example: {iff,dcg,{listing,"codegen"}}
+%%
+%% {unless,Flag,Cmd} If the given Flag is NOT given in the option list,
+%% Cmd will be interpreted as a command.
+%% Otherwise, Cmd will be ignored.
+%% Example: {unless,no_kernopt,{pass,sys_kernopt}}
+%%
+
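
A sketch of how the commands are interpreted (select_passes/2 and the ?pass macro are local to this module; the command list below is made up for illustration). With dcg among the options, the {iff,...} command becomes a listing pass and terminates the list; without it, the command is dropped and the later passes remain:

    [{transform_module,_}, {listing,_}] =
        select_passes([?pass(transform_module),
                       {iff,dcg,{listing,"codegen"}},
                       ?pass(lint_module)],
                      [dcg]),
    [{transform_module,_}, {lint_module,_}] =
        select_passes([?pass(transform_module),
                       {iff,dcg,{listing,"codegen"}},
                       ?pass(lint_module)],
                      []).
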
+select_passes([{pass,Mod}|Ps], Opts) ->
+ F = fun(St) ->
+ case catch Mod:module(St#compile.code, St#compile.options) of
+ {ok,Code} ->
+ {ok,St#compile{code=Code}};
+ {error,Es} ->
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end
+ end,
+ [{Mod,F}|select_passes(Ps, Opts)];
+select_passes([{src_listing,Ext}|_], _Opts) ->
+ [{listing,fun (St) -> src_listing(Ext, St) end}];
+select_passes([{listing,Ext}|_], _Opts) ->
+ [{listing,fun (St) -> listing(Ext, St) end}];
+select_passes([{done,Ext}|_], Opts) ->
+ select_passes([{unless,binary,{listing,Ext}}], Opts);
+select_passes([{iff,Flag,Pass}|Ps], Opts) ->
+ select_cond(Flag, true, Pass, Ps, Opts);
+select_passes([{unless,Flag,Pass}|Ps], Opts) ->
+ select_cond(Flag, false, Pass, Ps, Opts);
+select_passes([{_,Fun}=P|Ps], Opts) when is_function(Fun) ->
+ [P|select_passes(Ps, Opts)];
+select_passes([{_,Test,Fun}=P|Ps], Opts) when is_function(Test),
+ is_function(Fun) ->
+ [P|select_passes(Ps, Opts)];
+select_passes([], _Opts) ->
+ [];
+select_passes([List|Ps], Opts) when is_list(List) ->
+ case select_passes(List, Opts) of
+ [] -> select_passes(Ps, Opts);
+ Nested ->
+ case last(Nested) of
+ {listing,_Fun} -> Nested;
+ _Other -> Nested ++ select_passes(Ps, Opts)
+ end
+ end.
+
+select_cond(Flag, ShouldBe, Pass, Ps, Opts) ->
+ ShouldNotBe = not ShouldBe,
+ case member(Flag, Opts) of
+ ShouldBe -> select_passes([Pass|Ps], Opts);
+ ShouldNotBe -> select_passes(Ps, Opts)
+ end.
+
+%% The standard passes (almost) always run.
+
+standard_passes() ->
+ [?pass(transform_module),
+ {iff,'dpp',{listing,"pp"}},
+ ?pass(lint_module),
+ {iff,'P',{src_listing,"P"}},
+ {iff,'to_pp',{done,"P"}},
+
+ {iff,'dabstr',{listing,"abstr"}},
+ {iff,debug_info,?pass(save_abstract_code)},
+
+ ?pass(expand_module),
+ {iff,'dexp',{listing,"expand"}},
+ {iff,'E',{src_listing,"E"}},
+ {iff,'to_exp',{done,"E"}},
+
+ %% Conversion to Core Erlang.
+ ?pass(core_module),
+ {iff,'dcore',{listing,"core"}},
+ {iff,'to_core0',{done,"core"}}
+ | core_passes()].
+
+core_passes() ->
+ %% Optimization and transforms of Core Erlang code.
+ [{unless,no_copt,
+ [{core_old_inliner,fun test_old_inliner/1,fun core_old_inliner/1},
+ ?pass(core_fold_module),
+ {core_inline_module,fun test_core_inliner/1,fun core_inline_module/1},
+ {core_fold_after_inline,fun test_core_inliner/1,fun core_fold_module/1},
+ ?pass(core_transforms)]},
+ {iff,dcopt,{listing,"copt"}},
+ {iff,'to_core',{done,"core"}}
+ | kernel_passes()].
+
+kernel_passes() ->
+ %% Destructive setelement/3 optimization and core lint.
+ [?pass(core_dsetel_module),
+ {iff,clint,?pass(core_lint_module)},
+ {iff,core,?pass(save_core_code)},
+
+ %% Kernel Erlang and code generation.
+ ?pass(kernel_module),
+ {iff,dkern,{listing,"kernel"}},
+ {iff,'to_kernel',{done,"kernel"}},
+ {pass,v3_life},
+ {iff,dlife,{listing,"life"}},
+ {pass,v3_codegen},
+ {iff,dcg,{listing,"codegen"}}
+ | asm_passes()].
+
+asm_passes() ->
+ %% Assembly level optimisations.
+ [{unless,no_postopt,
+ [{pass,beam_block},
+ {iff,dblk,{listing,"block"}},
+ {unless,no_bopt,{pass,beam_bool}},
+ {iff,dbool,{listing,"bool"}},
+ {unless,no_topt,{pass,beam_type}},
+ {iff,dtype,{listing,"type"}},
+ {pass,beam_dead}, %Must always run since it splits blocks.
+ {iff,ddead,{listing,"dead"}},
+ {unless,no_jopt,{pass,beam_jump}},
+ {iff,djmp,{listing,"jump"}},
+ {pass,beam_clean},
+ {iff,dclean,{listing,"clean"}},
+ {pass,beam_flatten}]},
+
+ %% If post optimizations are turned off, we still coalesce
+ %% adjacent labels and remove unused labels to keep the
+ %% HiPE compiler happy.
+ {iff,no_postopt,
+ [?pass(beam_unused_labels),
+ {pass,beam_clean}]},
+
+ {iff,dopt,{listing,"optimize"}},
+ {iff,'S',{listing,"S"}},
+ {iff,'to_asm',{done,"S"}},
+
+ {pass,beam_validator},
+ ?pass(beam_asm)
+ | binary_passes()].
+
+binary_passes() ->
+ [{native_compile,fun test_native/1,fun native_compile/1},
+ {unless,binary,?pass(save_binary)}].
+
+%%%
+%%% Compiler passes.
+%%%
+
+%% Remove the target file so that we don't have an old one if the compilation fails.
+remove_file(St) ->
+ file:delete(St#compile.ofile),
+ {ok,St}.
+
+-record(asm_module, {module,
+ exports,
+ labels,
+ functions=[],
+ cfun,
+ code,
+ attributes=[]}).
+
+preprocess_asm_forms(Forms) ->
+ R = #asm_module{},
+ R1 = collect_asm(Forms, R),
+ {R1#asm_module.module,
+ {R1#asm_module.module,
+ R1#asm_module.exports,
+ R1#asm_module.attributes,
+ R1#asm_module.functions,
+ R1#asm_module.labels}}.
+
+collect_asm([], R) ->
+ case R#asm_module.cfun of
+ undefined ->
+ R;
+ {A,B,C} ->
+ R#asm_module{functions=R#asm_module.functions++
+ [{function,A,B,C,R#asm_module.code}]}
+ end;
+collect_asm([{module,M} | Rest], R) ->
+ collect_asm(Rest, R#asm_module{module=M});
+collect_asm([{exports,M} | Rest], R) ->
+ collect_asm(Rest, R#asm_module{exports=M});
+collect_asm([{labels,M} | Rest], R) ->
+ collect_asm(Rest, R#asm_module{labels=M});
+collect_asm([{function,A,B,C} | Rest], R) ->
+ R1 = case R#asm_module.cfun of
+ undefined ->
+ R;
+ {A0,B0,C0} ->
+ R#asm_module{functions=R#asm_module.functions++
+ [{function,A0,B0,C0,R#asm_module.code}]}
+ end,
+ collect_asm(Rest, R1#asm_module{cfun={A,B,C}, code=[]});
+collect_asm([{attributes, Attr} | Rest], R) ->
+ collect_asm(Rest, R#asm_module{attributes=Attr});
+collect_asm([X | Rest], R) ->
+ collect_asm(Rest, R#asm_module{code=R#asm_module.code++[X]}).
+
+beam_consult_asm(St) ->
+ case file:consult(St#compile.ifile) of
+ {ok, Forms0} ->
+ {Module, Forms} = preprocess_asm_forms(Forms0),
+ {ok,St#compile{module=Module, code=Forms}};
+ {error,E} ->
+ Es = [{St#compile.ifile,[{none,?MODULE,{open,E}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+read_beam_file(St) ->
+ case file:read_file(St#compile.ifile) of
+ {ok,Beam} ->
+ Infile = St#compile.ifile,
+ case is_too_old(Infile) of
+ true ->
+ {ok,St#compile{module=none,code=none}};
+ false ->
+ Mod0 = filename:rootname(filename:basename(Infile)),
+ Mod = list_to_atom(Mod0),
+ {ok,St#compile{module=Mod,code=Beam,ofile=Infile}}
+ end;
+ {error,E} ->
+ Es = [{St#compile.ifile,[{none,?MODULE,{open,E}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+is_too_old(BeamFile) ->
+ case beam_lib:chunks(BeamFile, ["CInf"]) of
+ {ok,{_,[{"CInf",Term0}]}} ->
+ Term = binary_to_term(Term0),
+ Opts = proplists:get_value(options, Term, []),
+ lists:member(no_new_funs, Opts);
+ _ -> false
+ end.
+
+parse_module(St) ->
+ Opts = St#compile.options,
+ Cwd = ".",
+ IncludePath = [Cwd, St#compile.dir|inc_paths(Opts)],
+ Tab = ets:new(compiler__tab, [protected,named_table]),
+ ets:insert(Tab, {compiler_options,Opts}),
+ R = epp:parse_file(St#compile.ifile, IncludePath, pre_defs(Opts)),
+ ets:delete(Tab),
+ case R of
+ {ok,Forms} ->
+ {ok,St#compile{code=Forms}};
+ {error,E} ->
+ Es = [{St#compile.ifile,[{none,?MODULE,{epp,E}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+parse_core(St) ->
+ case file:read_file(St#compile.ifile) of
+ {ok,Bin} ->
+ case core_scan:string(binary_to_list(Bin)) of
+ {ok,Toks,_} ->
+ case core_parse:parse(Toks) of
+ {ok,Mod} ->
+ Name = (Mod#c_module.name)#c_atom.val,
+ {ok,St#compile{module=Name,code=Mod}};
+ {error,E} ->
+ Es = [{St#compile.ifile,[E]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end;
+ {error,E,_} ->
+ Es = [{St#compile.ifile,[E]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end;
+ {error,E} ->
+ Es = [{St#compile.ifile,[{none,compile,{open,E}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+compile_options([{attribute,_L,compile,C}|Fs]) when is_list(C) ->
+ C ++ compile_options(Fs);
+compile_options([{attribute,_L,compile,C}|Fs]) ->
+ [C|compile_options(Fs)];
+compile_options([_F|Fs]) -> compile_options(Fs);
+compile_options([]) -> [].
+
+transforms(Os) -> [ M || {parse_transform,M} <- Os ].
+
+transform_module(St) ->
+ %% Extract compile options from code into options field.
+ Ts = transforms(St#compile.options ++ compile_options(St#compile.code)),
+ foldl_transform(St, Ts).
+
+foldl_transform(St, [T|Ts]) ->
+ Name = "transform " ++ atom_to_list(T),
+ Fun = fun(S) -> T:parse_transform(S#compile.code, S#compile.options) end,
+ Run = case member(time, St#compile.options) of
+ true -> fun run_tc/2;
+ false -> fun({_Name,F}, S) -> catch F(S) end
+ end,
+ case Run({Name, Fun}, St) of
+ {error,Es,Ws} ->
+ {error,St#compile{warnings=St#compile.warnings ++ Ws,
+ errors=St#compile.errors ++ Es}};
+ {'EXIT',R} ->
+ Es = [{St#compile.ifile,[{none,compile,{parse_transform,T,R}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}};
+ Forms ->
+ foldl_transform(St#compile{code=Forms}, Ts)
+ end;
+foldl_transform(St, []) -> {ok,St}.
+
+get_core_transforms(Opts) -> [M || {core_transform,M} <- Opts].
+
+core_transforms(St) ->
+    %% The options field holds the complete list of options at this point.
+
+ Ts = get_core_transforms(St#compile.options),
+ foldl_core_transforms(St, Ts).
+
+foldl_core_transforms(St, [T|Ts]) ->
+ Name = "core transform " ++ atom_to_list(T),
+ Fun = fun(S) -> T:core_transform(S#compile.code, S#compile.options) end,
+ Run = case member(time, St#compile.options) of
+ true -> fun run_tc/2;
+ false -> fun({_Name,F}, S) -> catch F(S) end
+ end,
+ case Run({Name, Fun}, St) of
+ {'EXIT',R} ->
+ Es = [{St#compile.ifile,[{none,compile,{core_transform,T,R}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}};
+ Forms ->
+ foldl_core_transforms(St#compile{code=Forms}, Ts)
+ end;
+foldl_core_transforms(St, []) -> {ok,St}.
+
+%%% Fetches the module name from a list of forms. The module attribute must
+%%% be present.
+get_module([{attribute,_,module,{M,_As}} | _]) -> M;
+get_module([{attribute,_,module,M} | _]) -> M;
+get_module([_ | Rest]) ->
+ get_module(Rest).
+
+%%% A #compile state is returned, where St.base has been filled in
+%%% with the module name from Forms, as a string, in case it wasn't
+%%% set in St (i.e., it was "").
+add_default_base(St, Forms) ->
+ F = St#compile.filename,
+ case F of
+ "" ->
+ M = get_module(Forms),
+ St#compile{base = atom_to_list(M)};
+ _ ->
+ St
+ end.
+
+lint_module(St) ->
+ case erl_lint:module(St#compile.code,
+ St#compile.ifile, St#compile.options) of
+ {ok,Ws} ->
+ %% Insert name of module as base name, if needed. This is
+ %% for compile:forms to work with listing files.
+ St1 = add_default_base(St, St#compile.code),
+ {ok,St1#compile{warnings=St1#compile.warnings ++ Ws}};
+ {error,Es,Ws} ->
+ {error,St#compile{warnings=St#compile.warnings ++ Ws,
+ errors=St#compile.errors ++ Es}}
+ end.
+
+core_lint_module(St) ->
+ case core_lint:module(St#compile.code, St#compile.options) of
+ {ok,Ws} ->
+ {ok,St#compile{warnings=St#compile.warnings ++ Ws}};
+ {error,Es,Ws} ->
+ {error,St#compile{warnings=St#compile.warnings ++ Ws,
+ errors=St#compile.errors ++ Es}}
+ end.
+
+%% expand_module(State) -> State'
+%% Do the common preprocessing of the input forms.
+
+expand_module(#compile{code=Code,options=Opts0}=St0) ->
+ {Mod,Exp,Forms,Opts1} = sys_pre_expand:module(Code, Opts0),
+ Opts2 = expand_opts(Opts1),
+ Opts = filter_opts(Opts2),
+ {ok,St0#compile{module=Mod,options=Opts,code={Mod,Exp,Forms}}}.
+
+core_module(#compile{code=Code0,options=Opts,ifile=File}=St) ->
+ {ok,Code,Ws} = v3_core:module(Code0, Opts),
+ {ok,St#compile{code=Code,warnings=St#compile.warnings ++ [{File,Ws}]}}.
+
+core_fold_module(#compile{code=Code0,options=Opts,ifile=File}=St) ->
+ {ok,Code,Ws} = sys_core_fold:module(Code0, Opts),
+ {ok,St#compile{code=Code,warnings=St#compile.warnings ++ [{File,Ws}]}}.
+
+test_old_inliner(#compile{options=Opts}) ->
+ %% The point of this test is to avoid loading the old inliner
+ %% if we know that it will not be used.
+ case any(fun(no_inline) -> true;
+ (_) -> false
+ end, Opts) of
+ true -> false;
+ false ->
+ any(fun({inline,_}) -> true;
+ (_) -> false
+ end, Opts)
+ end.
+
+test_core_inliner(#compile{options=Opts}) ->
+ case any(fun(no_inline) -> true;
+ (_) -> false
+ end, Opts) of
+ true -> false;
+ false ->
+ any(fun(inline) -> true;
+ (_) -> false
+ end, Opts)
+ end.
+
+core_old_inliner(#compile{code=Code0,options=Opts}=St) ->
+ case catch sys_core_inline:module(Code0, Opts) of
+ {ok,Code} ->
+ {ok,St#compile{code=Code}};
+ {error,Es} ->
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+core_inline_module(#compile{code=Code0,options=Opts}=St) ->
+ Code = cerl_inline:core_transform(Code0, Opts),
+ {ok,St#compile{code=Code}}.
+
+core_dsetel_module(#compile{code=Code0,options=Opts}=St) ->
+ {ok,Code} = sys_core_dsetel:module(Code0, Opts),
+ {ok,St#compile{code=Code}}.
+
+kernel_module(#compile{code=Code0,options=Opts,ifile=File}=St) ->
+ {ok,Code,Ws} = v3_kernel:module(Code0, Opts),
+ {ok,St#compile{code=Code,warnings=St#compile.warnings ++ [{File,Ws}]}}.
+
+save_abstract_code(St) ->
+ {ok,St#compile{abstract_code=abstract_code(St)}}.
+
+abstract_code(#compile{code=Code}) ->
+ Abstr = {raw_abstract_v1,Code},
+ case catch erlang:term_to_binary(Abstr, [compressed]) of
+ {'EXIT',_} -> term_to_binary(Abstr);
+ Other -> Other
+ end.
+
+save_core_code(St) ->
+ {ok,St#compile{core_code=cerl:from_records(St#compile.code)}}.
+
+beam_unused_labels(#compile{code=Code0}=St) ->
+ Code = beam_jump:module_labels(Code0),
+ {ok,St#compile{code=Code}}.
+
+beam_asm(#compile{ifile=File,code=Code0,abstract_code=Abst,options=Opts0}=St) ->
+ Source = filename:absname(File),
+ Opts = filter(fun is_informative_option/1, Opts0),
+ case beam_asm:module(Code0, Abst, Source, Opts) of
+ {ok,Code} -> {ok,St#compile{code=Code,abstract_code=[]}};
+ {error,Es} -> {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+test_native(#compile{options=Opts}) ->
+ %% This test must be made late, because the r7 or no_new_funs options
+ %% will turn off the native option.
+ member(native, Opts).
+
+native_compile(#compile{code=none}=St) -> {ok,St};
+native_compile(St) ->
+ case erlang:system_info(hipe_architecture) of
+ undefined ->
+ Ws = [{St#compile.ifile,[{none,compile,no_native_support}]}],
+ {ok,St#compile{warnings=St#compile.warnings ++ Ws}};
+ _ ->
+ native_compile_1(St)
+ end.
+
+native_compile_1(St) ->
+ Opts0 = [no_new_binaries|St#compile.options],
+ IgnoreErrors = member(ignore_native_errors, Opts0),
+ Opts = case keysearch(hipe, 1, Opts0) of
+ {value,{hipe,L}} when list(L) -> L;
+ {value,{hipe,X}} -> [X];
+ _ -> []
+ end,
+ case catch hipe:compile(St#compile.module,
+ St#compile.core_code,
+ St#compile.code,
+ Opts) of
+ {ok, {Type,Bin}} when binary(Bin) ->
+ {ok, embed_native_code(St, {Type,Bin})};
+ {error, R} ->
+ case IgnoreErrors of
+ true ->
+ Ws = [{St#compile.ifile,[{none,?MODULE,{native,R}}]}],
+ {ok,St#compile{warnings=St#compile.warnings ++ Ws}};
+ false ->
+ Es = [{St#compile.ifile,[{none,?MODULE,{native,R}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end;
+ {'EXIT',R} ->
+ case IgnoreErrors of
+ true ->
+ Ws = [{St#compile.ifile,[{none,?MODULE,{native_crash,R}}]}],
+ {ok,St#compile{warnings=St#compile.warnings ++ Ws}};
+ false ->
+ exit(R)
+ end
+ end.
+
+embed_native_code(St, {Architecture,NativeCode}) ->
+ {ok, _, Chunks0} = beam_lib:all_chunks(St#compile.code),
+ ChunkName = hipe_unified_loader:chunk_name(Architecture),
+ Chunks1 = lists:keydelete(ChunkName, 1, Chunks0),
+ Chunks = Chunks1 ++ [{ChunkName,NativeCode}],
+ {ok, BeamPlusNative} = beam_lib:build_module(Chunks),
+ St#compile{code=BeamPlusNative}.
+
+%% Returns true if the option is informative and therefore should be included
+%% in the option list of the compiled module.
+
+is_informative_option(beam) -> false;
+is_informative_option(report_warnings) -> false;
+is_informative_option(report_errors) -> false;
+is_informative_option(binary) -> false;
+is_informative_option(verbose) -> false;
+is_informative_option(_) -> true.
+
+save_binary(#compile{code=none}=St) -> {ok,St};
+save_binary(St) ->
+ Tfile = tmpfile(St#compile.ofile), %Temp working file
+ case write_binary(Tfile, St#compile.code, St) of
+ ok ->
+ case file:rename(Tfile, St#compile.ofile) of
+ ok ->
+ {ok,St};
+ {error,_Error} ->
+ file:delete(Tfile),
+ Es = [{St#compile.ofile,[{none,?MODULE,{rename,Tfile}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end;
+ {error,_Error} ->
+ Es = [{Tfile,[{compile,write_error}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+write_binary(Name, Bin, St) ->
+ Opts = case member(compressed, St#compile.options) of
+ true -> [compressed];
+ false -> []
+ end,
+ case file:write_file(Name, Bin, Opts) of
+ ok -> ok;
+ {error,_}=Error -> Error
+ end.
+
+%% report_errors(State) -> ok
+%% report_warnings(State) -> ok
+
+report_errors(St) ->
+ case member(report_errors, St#compile.options) of
+ true ->
+ foreach(fun ({{F,_L},Eds}) -> list_errors(F, Eds);
+ ({F,Eds}) -> list_errors(F, Eds) end,
+ St#compile.errors);
+ false -> ok
+ end.
+
+report_warnings(#compile{options=Opts,warnings=Ws0}) ->
+ case member(report_warnings, Opts) of
+ true ->
+ Ws1 = flatmap(fun({{F,_L},Eds}) -> format_message(F, Eds);
+ ({F,Eds}) -> format_message(F, Eds) end,
+ Ws0),
+ Ws = ordsets:from_list(Ws1),
+ foreach(fun({_,Str}) -> io:put_chars(Str) end, Ws);
+ false -> ok
+ end.
+
+format_message(F, [{Line,Mod,E}|Es]) ->
+ M = {Line,io_lib:format("~s:~w: Warning: ~s\n", [F,Line,Mod:format_error(E)])},
+ [M|format_message(F, Es)];
+format_message(F, [{Mod,E}|Es]) ->
+ M = {none,io_lib:format("~s: Warning: ~s\n", [F,Mod:format_error(E)])},
+ [M|format_message(F, Es)];
+format_message(_, []) -> [].
+
+%% list_errors(File, ErrorDescriptors) -> ok
+
+list_errors(F, [{Line,Mod,E}|Es]) ->
+ io:fwrite("~s:~w: ~s\n", [F,Line,Mod:format_error(E)]),
+ list_errors(F, Es);
+list_errors(F, [{Mod,E}|Es]) ->
+ io:fwrite("~s: ~s\n", [F,Mod:format_error(E)]),
+ list_errors(F, Es);
+list_errors(_F, []) -> ok.
+
+%% erlfile(Dir, Base, Suffix) -> ErlFile
+%% outfile(Base, Extension, Options) -> OutputFile
+%% objfile(Base, State) -> ObjFile
+%% tmpfile(ObjFile) -> TmpFile
+%% Work out the correct input and output file names.
+
+iofile(File) when atom(File) ->
+ iofile(atom_to_list(File));
+iofile(File) ->
+ {filename:dirname(File), filename:basename(File, ".erl")}.
+
+erlfile(Dir, Base, Suffix) ->
+ filename:join(Dir, Base++Suffix).
+
+outfile(Base, Ext, Opts) when atom(Ext) ->
+ outfile(Base, atom_to_list(Ext), Opts);
+outfile(Base, Ext, Opts) ->
+ Obase = case keysearch(outdir, 1, Opts) of
+ {value, {outdir, Odir}} -> filename:join(Odir, Base);
+ _Other -> Base % Not found or bad format
+ end,
+ Obase++"."++Ext.
+
+objfile(Base, St) ->
+ outfile(Base, "beam", St#compile.options).
+
+tmpfile(Ofile) ->
+ reverse([$#|tl(reverse(Ofile))]).
+
+%% pre_defs(Options)
+%% inc_paths(Options)
+%% Extract the predefined macros and include paths from the option list.
+
+pre_defs([{d,M,V}|Opts]) ->
+ [{M,V}|pre_defs(Opts)];
+pre_defs([{d,M}|Opts]) ->
+ [M|pre_defs(Opts)];
+pre_defs([_|Opts]) ->
+ pre_defs(Opts);
+pre_defs([]) -> [].
+
+inc_paths(Opts) ->
+ [ P || {i,P} <- Opts, list(P) ].
+
+src_listing(Ext, St) ->
+ listing(fun (Lf, {_Mod,_Exp,Fs}) -> do_src_listing(Lf, Fs);
+ (Lf, Fs) -> do_src_listing(Lf, Fs) end,
+ Ext, St).
+
+do_src_listing(Lf, Fs) ->
+ foreach(fun (F) -> io:put_chars(Lf, [erl_pp:form(F),"\n"]) end,
+ Fs).
+
+listing(Ext, St) ->
+ listing(fun(Lf, Fs) -> beam_listing:module(Lf, Fs) end, Ext, St).
+
+listing(LFun, Ext, St) ->
+ Lfile = outfile(St#compile.base, Ext, St#compile.options),
+ case file:open(Lfile, [write,delayed_write]) of
+ {ok,Lf} ->
+ LFun(Lf, St#compile.code),
+ ok = file:close(Lf),
+ {ok,St};
+ {error,_Error} ->
+ Es = [{Lfile,[{none,compile,write_error}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
+ end.
+
+options() ->
+ help(standard_passes()).
+
+help([{iff,Flag,{src_listing,Ext}}|T]) ->
+ io:fwrite("~p - Generate .~s source listing file\n", [Flag,Ext]),
+ help(T);
+help([{iff,Flag,{listing,Ext}}|T]) ->
+ io:fwrite("~p - Generate .~s file\n", [Flag,Ext]),
+ help(T);
+help([{iff,Flag,{Name,Fun}}|T]) when function(Fun) ->
+ io:fwrite("~p - Run ~s\n", [Flag,Name]),
+ help(T);
+help([{iff,_Flag,Action}|T]) ->
+ help(Action),
+ help(T);
+help([{unless,Flag,{pass,Pass}}|T]) ->
+ io:fwrite("~p - Skip the ~s pass\n", [Flag,Pass]),
+ help(T);
+help([{unless,no_postopt=Flag,List}|T]) when list(List) ->
+    %% Hard-coded knowledge here.
+ io:fwrite("~p - Skip all post optimisation\n", [Flag]),
+ help(List),
+ help(T);
+help([{unless,_Flag,Action}|T]) ->
+ help(Action),
+ help(T);
+help([_|T]) ->
+ help(T);
+help(_) ->
+ ok.
+
+
+%% compile(AbsFileName, Outfilename, Options)
+%% Compile entry point for erl_compile.
+
+compile(File0, _OutFile, Options) ->
+ File = shorten_filename(File0),
+ case file(File, make_erl_options(Options)) of
+ {ok,_Mod} -> ok;
+ Other -> Other
+ end.
+
+compile_beam(File0, _OutFile, Opts) ->
+ File = shorten_filename(File0),
+ case file(File, [from_beam|make_erl_options(Opts)]) of
+ {ok,_Mod} -> ok;
+ Other -> Other
+ end.
+
+compile_asm(File0, _OutFile, Opts) ->
+ File = shorten_filename(File0),
+ case file(File, [asm|make_erl_options(Opts)]) of
+ {ok,_Mod} -> ok;
+ Other -> Other
+ end.
+
+compile_core(File0, _OutFile, Opts) ->
+ File = shorten_filename(File0),
+ case file(File, [from_core|make_erl_options(Opts)]) of
+ {ok,_Mod} -> ok;
+ Other -> Other
+ end.
+
+shorten_filename(Name0) ->
+ {ok,Cwd} = file:get_cwd(),
+ case lists:prefix(Cwd, Name0) of
+ false -> Name0;
+ true ->
+ Name = case lists:nthtail(length(Cwd), Name0) of
+ "/"++N -> N;
+ N -> N
+ end,
+ Name
+ end.
+
+%% Converts generic compiler options to specific options.
+
+make_erl_options(Opts) ->
+
+ %% This way of extracting will work even if the record passed
+ %% has more fields than known during compilation.
+
+ Includes = Opts#options.includes,
+ Defines = Opts#options.defines,
+ Outdir = Opts#options.outdir,
+ Warning = Opts#options.warning,
+ Verbose = Opts#options.verbose,
+ Specific = Opts#options.specific,
+ OutputType = Opts#options.output_type,
+ Cwd = Opts#options.cwd,
+
+ Options =
+ case Verbose of
+ true -> [verbose];
+ false -> []
+ end ++
+ case Warning of
+ 0 -> [];
+ _ -> [report_warnings]
+ end ++
+ map(
+ fun ({Name, Value}) ->
+ {d, Name, Value};
+ (Name) ->
+ {d, Name}
+ end,
+ Defines) ++
+ case OutputType of
+ undefined -> [];
+ jam -> [jam];
+ beam -> [beam];
+ native -> [native]
+ end,
+
+ Options++[report_errors, {cwd, Cwd}, {outdir, Outdir}|
+ map(fun(Dir) -> {i, Dir} end, Includes)]++Specific.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lib.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lib.erl
new file mode 100644
index 0000000000..3a6158286f
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lib.erl
@@ -0,0 +1,509 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: core_lib.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose: Core Erlang abstract syntax functions.
+
+-module(core_lib).
+
+-export([get_anno/1,set_anno/2]).
+-export([is_atomic/1,is_literal/1,is_literal_list/1,
+ is_simple/1,is_simple_list/1,is_simple_top/1]).
+-export([literal_value/1,make_literal/1]).
+-export([make_values/1]).
+-export([map/2, fold/3, mapfold/3]).
+-export([is_var_used/2]).
+
+%% -compile([export_all]).
+
+-include("core_parse.hrl").
+
+%% get_anno(Core) -> Anno.
+%% set_anno(Core, Anno) -> Core.
+%% Generic get/set annotation.
+
+get_anno(C) -> element(2, C).
+set_anno(C, A) -> setelement(2, C, A).
+
+%% is_atomic(Expr) -> true | false.
+
+is_atomic(#c_char{}) -> true;
+is_atomic(#c_int{}) -> true;
+is_atomic(#c_float{}) -> true;
+is_atomic(#c_atom{}) -> true;
+is_atomic(#c_string{}) -> true;
+is_atomic(#c_nil{}) -> true;
+is_atomic(#c_fname{}) -> true;
+is_atomic(_) -> false.
+
+%% is_literal(Expr) -> true | false.
+
+is_literal(#c_cons{hd=H,tl=T}) ->
+ case is_literal(H) of
+ true -> is_literal(T);
+ false -> false
+ end;
+is_literal(#c_tuple{es=Es}) -> is_literal_list(Es);
+is_literal(#c_binary{segments=Es}) -> is_lit_bin(Es);
+is_literal(E) -> is_atomic(E).
+
+is_literal_list(Es) -> lists:all(fun is_literal/1, Es).
+
+is_lit_bin(Es) ->
+ lists:all(fun (#c_bitstr{val=E,size=S}) ->
+ is_literal(E) and is_literal(S)
+ end, Es).
+
+%% is_simple(Expr) -> true | false.
+
+is_simple(#c_var{}) -> true;
+is_simple(#c_cons{hd=H,tl=T}) ->
+ case is_simple(H) of
+ true -> is_simple(T);
+ false -> false
+ end;
+is_simple(#c_tuple{es=Es}) -> is_simple_list(Es);
+is_simple(#c_binary{segments=Es}) -> is_simp_bin(Es);
+is_simple(E) -> is_atomic(E).
+
+is_simple_list(Es) -> lists:all(fun is_simple/1, Es).
+
+is_simp_bin(Es) ->
+ lists:all(fun (#c_bitstr{val=E,size=S}) ->
+ is_simple(E) and is_simple(S)
+ end, Es).
+
+%% is_simple_top(Expr) -> true | false.
+%% Only check if the top-level is a simple.
+
+is_simple_top(#c_var{}) -> true;
+is_simple_top(#c_cons{}) -> true;
+is_simple_top(#c_tuple{}) -> true;
+is_simple_top(#c_binary{}) -> true;
+is_simple_top(E) -> is_atomic(E).
+
+%% literal_value(LitExpr) -> Value.
+%% Return the value of LitExpr.
+
+literal_value(#c_char{val=C}) -> C;
+literal_value(#c_int{val=I}) -> I;
+literal_value(#c_float{val=F}) -> F;
+literal_value(#c_atom{val=A}) -> A;
+literal_value(#c_string{val=S}) -> S;
+literal_value(#c_nil{}) -> [];
+literal_value(#c_cons{hd=H,tl=T}) ->
+ [literal_value(H)|literal_value(T)];
+literal_value(#c_tuple{es=Es}) ->
+ list_to_tuple(literal_value_list(Es)).
+
+literal_value_list(Vals) -> lists:map(fun literal_value/1, Vals).
+
+%% make_literal(Value) -> LitExpr.
+%% Make a literal expression from an Erlang value.
+
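
A round trip through make_literal/1 and literal_value/1 (illustrative; both functions are exported from this module):

    Lit = core_lib:make_literal({foo, [1, 2.0], bar}),
    true = core_lib:is_literal(Lit),
    {foo, [1, 2.0], bar} = core_lib:literal_value(Lit).
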
+make_literal(I) when integer(I) -> #c_int{val=I};
+make_literal(F) when float(F) -> #c_float{val=F};
+make_literal(A) when atom(A) -> #c_atom{val=A};
+make_literal([]) -> #c_nil{};
+make_literal([H|T]) ->
+ #c_cons{hd=make_literal(H),tl=make_literal(T)};
+make_literal(T) when tuple(T) ->
+ #c_tuple{es=make_literal_list(tuple_to_list(T))}.
+
+make_literal_list(Vals) -> lists:map(fun make_literal/1, Vals).
+
+%% make_values([CoreExpr] | CoreExpr) -> #c_values{} | CoreExpr.
+%% Make a suitable values structure, expr or values, depending on
+%% Expr.
+
+make_values([E]) -> E;
+make_values([H|_]=Es) -> #c_values{anno=get_anno(H),es=Es};
+make_values([]) -> #c_values{es=[]};
+make_values(E) -> E.
+
+%% map(MapFun, CoreExpr) -> CoreExpr.
+%%  This function traverses the core parse format, at each level
+%%  applying the submitted argument function, which is assumed to do
+%%  the real work.
+%%
+%%  The "eager" style, where each component of a construct is descended
+%%  into before the construct itself, means that some companion
+%%  functions (the F:s) may be made simpler, since it may safely be
+%%  assumed that no illegal lower-level instances will be
+%%  created/uncovered by actions on the current level.
+
+map(F, #c_tuple{es=Es}=R) ->
+ F(R#c_tuple{es=map_list(F, Es)});
+map(F, #c_cons{hd=Hd, tl=Tl}=R) ->
+ F(R#c_cons{hd=map(F, Hd),
+ tl=map(F, Tl)});
+map(F, #c_values{es=Es}=R) ->
+ F(R#c_values{es=map_list(F, Es)});
+
+map(F, #c_alias{var=Var, pat=Pat}=R) ->
+ F(R#c_alias{var=map(F, Var),
+ pat=map(F, Pat)});
+
+map(F, #c_module{defs=Defs}=R) ->
+ F(R#c_module{defs=map_list(F, Defs)});
+map(F, #c_def{val=Val}=R) ->
+ F(R#c_def{val=map(F, Val)});
+
+map(F, #c_fun{vars=Vars, body=Body}=R) ->
+ F(R#c_fun{vars=map_list(F, Vars),
+ body=map(F, Body)});
+map(F, #c_let{vars=Vs, arg=Arg, body=Body}=R) ->
+ F(R#c_let{vars=map_list(F, Vs),
+ arg=map(F, Arg),
+ body=map(F, Body)});
+map(F, #c_letrec{defs=Fs,body=Body}=R) ->
+ F(R#c_letrec{defs=map_list(F, Fs),
+ body=map(F, Body)});
+map(F, #c_seq{arg=Arg, body=Body}=R) ->
+ F(R#c_seq{arg=map(F, Arg),
+ body=map(F, Body)});
+map(F, #c_case{arg=Arg, clauses=Clauses}=R) ->
+ F(R#c_case{arg=map(F, Arg),
+ clauses=map_list(F, Clauses)});
+map(F, #c_clause{pats=Ps, guard=Guard, body=Body}=R) ->
+ F(R#c_clause{pats=map_list(F, Ps),
+ guard=map(F, Guard),
+ body=map(F, Body)});
+map(F, #c_receive{clauses=Cls, timeout=Tout, action=Act}=R) ->
+ F(R#c_receive{clauses=map_list(F, Cls),
+ timeout=map(F, Tout),
+ action=map(F, Act)});
+map(F, #c_apply{op=Op,args=Args}=R) ->
+ F(R#c_apply{op=map(F, Op),
+ args=map_list(F, Args)});
+map(F, #c_call{module=M,name=N,args=Args}=R) ->
+ F(R#c_call{module=map(F, M),
+ name=map(F, N),
+ args=map_list(F, Args)});
+map(F, #c_primop{name=N,args=Args}=R) ->
+ F(R#c_primop{name=map(F, N),
+ args=map_list(F, Args)});
+map(F, #c_try{arg=Expr,vars=Vars,body=Body,evars=Evars,handler=Handler}=R) ->
+ F(R#c_try{arg=map(F, Expr),
+ vars=map(F, Vars),
+ body=map(F, Body),
+ evars=map(F, Evars),
+ handler=map(F, Handler)});
+map(F, #c_catch{body=Body}=R) ->
+ F(R#c_catch{body=map(F, Body)});
+map(F, T) -> F(T). %Atomic nodes.
+
+map_list(F, L) -> lists:map(fun (E) -> map(F, E) end, L).
+
+%% fold(FoldFun, Accumulator, CoreExpr) -> Accumulator.
+%% This function traverses the core parse format, at each level
+%%  applying the submitted argument function, assumed to do the real
+%% work, and keeping the accumulated result in the A (accumulator)
+%% argument.
+
+fold(F, Acc, #c_tuple{es=Es}=R) ->
+ F(R, fold_list(F, Acc, Es));
+fold(F, Acc, #c_cons{hd=Hd, tl=Tl}=R) ->
+ F(R, fold(F, fold(F, Acc, Hd), Tl));
+fold(F, Acc, #c_values{es=Es}=R) ->
+ F(R, fold_list(F, Acc, Es));
+
+fold(F, Acc, #c_alias{pat=P,var=V}=R) ->
+ F(R, fold(F, fold(F, Acc, P), V));
+
+fold(F, Acc, #c_module{defs=Defs}=R) ->
+ F(R, fold_list(F, Acc, Defs));
+fold(F, Acc, #c_def{val=Val}=R) ->
+ F(R, fold(F, Acc, Val));
+
+fold(F, Acc, #c_fun{vars=Vars, body=Body}=R) ->
+ F(R, fold(F, fold_list(F, Acc, Vars), Body));
+fold(F, Acc, #c_let{vars=Vs, arg=Arg, body=Body}=R) ->
+ F(R, fold(F, fold(F, fold_list(F, Acc, Vs), Arg), Body));
+fold(F, Acc, #c_letrec{defs=Fs,body=Body}=R) ->
+ F(R, fold(F, fold_list(F, Acc, Fs), Body));
+fold(F, Acc, #c_seq{arg=Arg, body=Body}=R) ->
+ F(R, fold(F, fold(F, Acc, Arg), Body));
+fold(F, Acc, #c_case{arg=Arg, clauses=Clauses}=R) ->
+ F(R, fold_list(F, fold(F, Acc, Arg), Clauses));
+fold(F, Acc, #c_clause{pats=Ps,guard=G,body=B}=R) ->
+ F(R, fold(F, fold(F, fold_list(F, Acc, Ps), G), B));
+fold(F, Acc, #c_receive{clauses=Cl, timeout=Ti, action=Ac}=R) ->
+ F(R, fold_list(F, fold(F, fold(F, Acc, Ac), Ti), Cl));
+fold(F, Acc, #c_apply{op=Op, args=Args}=R) ->
+ F(R, fold_list(F, fold(F, Acc, Op), Args));
+fold(F, Acc, #c_call{module=Mod,name=Name,args=Args}=R) ->
+ F(R, fold_list(F, fold(F, fold(F, Acc, Mod), Name), Args));
+fold(F, Acc, #c_primop{name=Name,args=Args}=R) ->
+ F(R, fold_list(F, fold(F, Acc, Name), Args));
+fold(F, Acc, #c_try{arg=E,vars=Vs,body=Body,evars=Evs,handler=H}=R) ->
+ NewB = fold(F, fold_list(F, fold(F, Acc, E), Vs), Body),
+ F(R, fold(F, fold_list(F, NewB, Evs), H));
+fold(F, Acc, #c_catch{body=Body}=R) ->
+ F(R, fold(F, Acc, Body));
+fold(F, Acc, T) -> %Atomic nodes
+ F(T, Acc).
+
+fold_list(F, Acc, L) ->
+ lists:foldl(fun (E, A) -> fold(F, A, E) end, Acc, L).
+
+%% mapfold(MapfoldFun, Accumulator, CoreExpr) -> {CoreExpr,Accumulator}.
+%% This function traverses the core parse format, at each level
+%%  applying the submitted argument function, assumed to do the real
+%% work, and keeping the accumulated result in the A (accumulator)
+%% argument.
+
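
A minimal usage sketch: count the nodes of a tree while returning it unchanged (count_nodes/1 is a hypothetical helper, not part of this module; the callback must return {NewNode, NewAcc}):

    count_nodes(Tree) ->
        {_Same, N} = core_lib:mapfold(fun(Node, Acc) -> {Node, Acc + 1} end,
                                      0, Tree),
        N.
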
+mapfold(F, Acc0, #c_tuple{es=Es0}=R) ->
+ {Es1,Acc1} = mapfold_list(F, Acc0, Es0),
+ F(R#c_tuple{es=Es1}, Acc1);
+mapfold(F, Acc0, #c_cons{hd=H0,tl=T0}=R) ->
+ {H1,Acc1} = mapfold(F, Acc0, H0),
+ {T1,Acc2} = mapfold(F, Acc1, T0),
+ F(R#c_cons{hd=H1,tl=T1}, Acc2);
+mapfold(F, Acc0, #c_values{es=Es0}=R) ->
+ {Es1,Acc1} = mapfold_list(F, Acc0, Es0),
+ F(R#c_values{es=Es1}, Acc1);
+
+mapfold(F, Acc0, #c_alias{pat=P0,var=V0}=R) ->
+ {P1,Acc1} = mapfold(F, Acc0, P0),
+ {V1,Acc2} = mapfold(F, Acc1, V0),
+ F(R#c_alias{pat=P1,var=V1}, Acc2);
+
+mapfold(F, Acc0, #c_module{defs=D0}=R) ->
+ {D1,Acc1} = mapfold_list(F, Acc0, D0),
+ F(R#c_module{defs=D1}, Acc1);
+mapfold(F, Acc0, #c_def{val=V0}=R) ->
+ {V1,Acc1} = mapfold(F, Acc0, V0),
+ F(R#c_def{val=V1}, Acc1);
+
+mapfold(F, Acc0, #c_fun{vars=Vs0, body=B0}=R) ->
+ {Vs1,Acc1} = mapfold_list(F, Acc0, Vs0),
+ {B1,Acc2} = mapfold(F, Acc1, B0),
+ F(R#c_fun{vars=Vs1,body=B1}, Acc2);
+mapfold(F, Acc0, #c_let{vars=Vs0, arg=A0, body=B0}=R) ->
+ {Vs1,Acc1} = mapfold_list(F, Acc0, Vs0),
+ {A1,Acc2} = mapfold(F, Acc1, A0),
+ {B1,Acc3} = mapfold(F, Acc2, B0),
+ F(R#c_let{vars=Vs1,arg=A1,body=B1}, Acc3);
+mapfold(F, Acc0, #c_letrec{defs=Fs0,body=B0}=R) ->
+ {Fs1,Acc1} = mapfold_list(F, Acc0, Fs0),
+ {B1,Acc2} = mapfold(F, Acc1, B0),
+ F(R#c_letrec{defs=Fs1,body=B1}, Acc2);
+mapfold(F, Acc0, #c_seq{arg=A0, body=B0}=R) ->
+ {A1,Acc1} = mapfold(F, Acc0, A0),
+ {B1,Acc2} = mapfold(F, Acc1, B0),
+ F(R#c_seq{arg=A1,body=B1}, Acc2);
+mapfold(F, Acc0, #c_case{arg=A0,clauses=Cs0}=R) ->
+ {A1,Acc1} = mapfold(F, Acc0, A0),
+ {Cs1,Acc2} = mapfold_list(F, Acc1, Cs0),
+ F(R#c_case{arg=A1,clauses=Cs1}, Acc2);
+mapfold(F, Acc0, #c_clause{pats=Ps0,guard=G0,body=B0}=R) ->
+ {Ps1,Acc1} = mapfold_list(F, Acc0, Ps0),
+ {G1,Acc2} = mapfold(F, Acc1, G0),
+ {B1,Acc3} = mapfold(F, Acc2, B0),
+ F(R#c_clause{pats=Ps1,guard=G1,body=B1}, Acc3);
+mapfold(F, Acc0, #c_receive{clauses=Cs0,timeout=T0,action=A0}=R) ->
+ {T1,Acc1} = mapfold(F, Acc0, T0),
+ {Cs1,Acc2} = mapfold_list(F, Acc1, Cs0),
+ {A1,Acc3} = mapfold(F, Acc2, A0),
+ F(R#c_receive{clauses=Cs1,timeout=T1,action=A1}, Acc3);
+mapfold(F, Acc0, #c_apply{op=Op0, args=As0}=R) ->
+ {Op1,Acc1} = mapfold(F, Acc0, Op0),
+ {As1,Acc2} = mapfold_list(F, Acc1, As0),
+ F(R#c_apply{op=Op1,args=As1}, Acc2);
+mapfold(F, Acc0, #c_call{module=M0,name=N0,args=As0}=R) ->
+ {M1,Acc1} = mapfold(F, Acc0, M0),
+ {N1,Acc2} = mapfold(F, Acc1, N0),
+ {As1,Acc3} = mapfold_list(F, Acc2, As0),
+ F(R#c_call{module=M1,name=N1,args=As1}, Acc3);
+mapfold(F, Acc0, #c_primop{name=N0, args=As0}=R) ->
+ {N1,Acc1} = mapfold(F, Acc0, N0),
+ {As1,Acc2} = mapfold_list(F, Acc1, As0),
+ F(R#c_primop{name=N1,args=As1}, Acc2);
+mapfold(F, Acc0, #c_try{arg=E0,vars=Vs0,body=B0,evars=Evs0,handler=H0}=R) ->
+ {E1,Acc1} = mapfold(F, Acc0, E0),
+ {Vs1,Acc2} = mapfold_list(F, Acc1, Vs0),
+ {B1,Acc3} = mapfold(F, Acc2, B0),
+ {Evs1,Acc4} = mapfold_list(F, Acc3, Evs0),
+ {H1,Acc5} = mapfold(F, Acc4, H0),
+ F(R#c_try{arg=E1,vars=Vs1,body=B1,evars=Evs1,handler=H1}, Acc5);
+mapfold(F, Acc0, #c_catch{body=B0}=R) ->
+ {B1,Acc1} = mapfold(F, Acc0, B0),
+ F(R#c_catch{body=B1}, Acc1);
+mapfold(F, Acc, T) -> %Atomic nodes
+ F(T, Acc).
+
+mapfold_list(F, Acc, L) ->
+ lists:mapfoldl(fun (E, A) -> mapfold(F, A, E) end, Acc, L).
+
+%% is_var_used(VarName, Expr) -> true | false.
+%% Test if the variable VarName is used in Expr.
+
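
Note that binding constructs shadow: in let <X> = 'ok' in X the outer variable 'X' counts as unused, although the let body uses its own 'X'. A sketch using the core_parse.hrl records (illustrative):

    Let = #c_let{vars=[#c_var{name='X'}],
                 arg=#c_atom{val=ok},
                 body=#c_var{name='X'}},
    false = core_lib:is_var_used('X', Let),
    true  = core_lib:is_var_used('X', Let#c_let.body).
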
+is_var_used(V, B) -> vu_body(V, B).
+
+vu_body(V, #c_values{es=Es}) ->
+ vu_expr_list(V, Es);
+vu_body(V, Body) ->
+ vu_expr(V, Body).
+
+vu_expr(V, #c_var{name=V2}) -> V =:= V2;
+vu_expr(V, #c_cons{hd=H,tl=T}) ->
+ case vu_expr(V, H) of
+ true -> true;
+ false -> vu_expr(V, T)
+ end;
+vu_expr(V, #c_tuple{es=Es}) ->
+ vu_expr_list(V, Es);
+vu_expr(V, #c_binary{segments=Ss}) ->
+ vu_seg_list(V, Ss);
+vu_expr(V, #c_fun{vars=Vs,body=B}) ->
+ %% Variables in fun shadow previous variables
+ case vu_var_list(V, Vs) of
+ true -> false;
+ false -> vu_body(V, B)
+ end;
+vu_expr(V, #c_let{vars=Vs,arg=Arg,body=B}) ->
+ case vu_body(V, Arg) of
+ true -> true;
+ false ->
+ %% Variables in let shadow previous variables.
+ case vu_var_list(V, Vs) of
+ true -> false;
+ false -> vu_body(V, B)
+ end
+ end;
+vu_expr(V, #c_letrec{defs=Fs,body=B}) ->
+ case lists:any(fun (#c_def{val=Fb}) -> vu_body(V, Fb) end, Fs) of
+ true -> true;
+ false -> vu_body(V, B)
+ end;
+vu_expr(V, #c_seq{arg=Arg,body=B}) ->
+ case vu_expr(V, Arg) of
+ true -> true;
+ false -> vu_body(V, B)
+ end;
+vu_expr(V, #c_case{arg=Arg,clauses=Cs}) ->
+ case vu_expr(V, Arg) of
+ true -> true;
+ false -> vu_clauses(V, Cs)
+ end;
+vu_expr(V, #c_receive{clauses=Cs,timeout=T,action=A}) ->
+ case vu_clauses(V, Cs) of
+ true -> true;
+ false ->
+ case vu_expr(V, T) of
+ true -> true;
+ false -> vu_body(V, A)
+ end
+ end;
+vu_expr(V, #c_apply{op=Op,args=As}) ->
+ vu_expr_list(V, [Op|As]);
+vu_expr(V, #c_call{module=M,name=N,args=As}) ->
+ vu_expr_list(V, [M,N|As]);
+vu_expr(V, #c_primop{args=As}) -> %Name is an atom
+ vu_expr_list(V, As);
+vu_expr(V, #c_catch{body=B}) ->
+ vu_body(V, B);
+vu_expr(V, #c_try{arg=E,vars=Vs,body=B,evars=Evs,handler=H}) ->
+ case vu_body(V, E) of
+ true -> true;
+ false ->
+ %% Variables shadow previous ones.
+ case case vu_var_list(V, Vs) of
+ true -> false;
+ false -> vu_body(V, B)
+ end of
+ true -> true;
+ false ->
+ case vu_var_list(V, Evs) of
+ true -> false;
+ false -> vu_body(V, H)
+ end
+ end
+ end;
+vu_expr(_, _) -> false. %Everything else
+
+vu_expr_list(V, Es) ->
+ lists:any(fun(E) -> vu_expr(V, E) end, Es).
+
+vu_seg_list(V, Ss) ->
+ lists:any(fun (#c_bitstr{val=Val,size=Size}) ->
+ case vu_expr(V, Val) of
+ true -> true;
+ false -> vu_expr(V, Size)
+ end
+ end, Ss).
+
+%% vu_clause(VarName, Clause) -> true | false.
+%% vu_clauses(VarName, [Clause]) -> true | false.
+%% Have to get the pattern results right.
+
+vu_clause(V, #c_clause{pats=Ps,guard=G,body=B}) ->
+ case vu_pattern_list(V, Ps) of
+ {true,_Shad} -> true; %It is used
+ {false,true} -> false; %Shadowed
+ {false,false} -> %Not affected
+ case vu_expr(V, G) of
+ true -> true;
+ false -> vu_body(V, B)
+ end
+ end.
+
+vu_clauses(V, Cs) ->
+ lists:any(fun(C) -> vu_clause(V, C) end, Cs).
+
+%% vu_pattern(VarName, Pattern) -> {Used,Shadow}.
+%% vu_pattern_list(VarName, [Pattern]) -> {Used,Shadow}.
+%% Binaries complicate patterns as a variable can both be properly
+%% used, in a bit segment size, and shadow. They can also do both.
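+%%
+%%  For example (an editorial sketch, using Erlang-level notation): in
+%%  the segment pattern <<Y:X>> the variable X is used, because it
+%%  occurs in a size position; a plain pattern variable X shadows; and
+%%  <<X:X>> does both, which is why the result is a {Used,Shadow} pair
+%%  rather than a single flag.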
+
+%%vu_pattern(V, Pat) -> vu_pattern(V, Pat, {false,false}).
+
+vu_pattern(V, #c_var{name=V2}, St) ->
+ setelement(2, St, V =:= V2);
+vu_pattern(V, #c_cons{hd=H,tl=T}, St0) ->
+ case vu_pattern(V, H, St0) of
+ {true,true}=St1 -> St1; %Nothing more to know
+ St1 -> vu_pattern(V, T, St1)
+ end;
+vu_pattern(V, #c_tuple{es=Es}, St) ->
+ vu_pattern_list(V, Es, St);
+vu_pattern(V, #c_binary{segments=Ss}, St) ->
+ vu_pat_seg_list(V, Ss, St);
+vu_pattern(V, #c_alias{var=Var,pat=P}, St0) ->
+ case vu_pattern(V, Var, St0) of
+ {true,true}=St1 -> St1;
+ St1 -> vu_pattern(V, P, St1)
+ end;
+vu_pattern(_, _, St) -> St.
+
+vu_pattern_list(V, Ps) -> vu_pattern_list(V, Ps, {false,false}).
+
+vu_pattern_list(V, Ps, St0) ->
+ lists:foldl(fun(P, St) -> vu_pattern(V, P, St) end, St0, Ps).
+
+vu_pat_seg_list(V, Ss, St) ->
+ lists:foldl(fun (#c_bitstr{val=Val,size=Size}, St0) ->
+ case vu_pattern(V, Val, St0) of
+ {true,true}=St1 -> St1;
+ {_Used,Shad} -> {vu_expr(V, Size),Shad}
+ end
+ end, St, Ss).
+
+%% vu_var_list(VarName, [Var]) -> true | false.
+
+vu_var_list(V, Vs) ->
+ lists:any(fun (#c_var{name=V2}) -> V =:= V2 end, Vs).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lint.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lint.erl
new file mode 100644
index 0000000000..2946fcb8c0
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_lint.erl
@@ -0,0 +1,515 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: core_lint.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Do necessary checking of Core Erlang code.
+
+%% Check Core module for errors. Seeing this module is used in the
+%% compiler after optimisations we do more checking than would be
+%% necessary after just parsing. Don't check all constructs.
+%%
+%% We check the following:
+%%
+%% All referred functions, called and exported, are defined.
+%% Format of export list.
+%% Format of attributes.
+%% Used variables are defined.
+%% Variables in let and funs.
+%% Patterns in case clauses.
+%% Values only as multiple values/variables/patterns.
+%% Return same number of values as requested.
+%% Correct number of arguments.
+%%
+%% Checks to add:
+%%
+%% Consistency of values/variables
+%% Consistency of function return values/calls.
+%%
+%% We keep the names of defined variables and functions in an ordered list
+%% of variable names and function name/arity pairs.
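+%%
+%% A hedged usage sketch (this copy lives in Dialyzer's test data and
+%% compiles against the bundled core_parse.hrl; the option names follow
+%% the standard compile module):
+%%
+%%   {ok, _Mod, Core} = compile:file("foo.erl", [to_core, binary]),
+%%   case core_lint:module(Core) of
+%%       {ok, _Warnings}            -> ok;
+%%       {error, Errors, _Warnings} -> {lint_failed, Errors}
+%%   end.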
+
+-module(core_lint).
+
+
+-export([module/1,module/2,format_error/1]).
+
+-import(lists, [reverse/1,all/2,foldl/3]).
+-import(ordsets, [add_element/2,is_element/2,union/2]).
+%-import(ordsets, [subtract/2]).
+
+-include("core_parse.hrl").
+
+%% Define the lint state record.
+
+-record(lint, {module=[], %Current module
+ func=[], %Current function
+ errors=[], %Errors
+ warnings=[]}). %Warnings
+
+%% Keep track of defined variables and functions.
+-record(def, {vars=[],
+ funs=[]}).
+
+%%-deftype retcount() -> any | unknown | int().
+
+%% format_error(Error)
+%% Return a string describing the error.
+
+format_error(invalid_exports) -> "invalid exports";
+format_error(invalid_attributes) -> "invalid attributes";
+format_error({undefined_function,{F,A}}) ->
+ io_lib:format("function ~w/~w undefined", [F,A]);
+format_error({undefined_function,{F1,A1},{F2,A2}}) ->
+ io_lib:format("undefined function ~w/~w in ~w/~w", [F1,A1,F2,A2]);
+format_error({illegal_expr,{F,A}}) ->
+ io_lib:format("illegal expression in ~w/~w", [F,A]);
+format_error({illegal_guard,{F,A}}) ->
+ io_lib:format("illegal guard expression in ~w/~w", [F,A]);
+format_error({illegal_pattern,{F,A}}) ->
+ io_lib:format("illegal pattern in ~w/~w", [F,A]);
+format_error({illegal_try,{F,A}}) ->
+ io_lib:format("illegal try expression in ~w/~w", [F,A]);
+format_error({pattern_mismatch,{F,A}}) ->
+ io_lib:format("pattern count mismatch in ~w/~w", [F,A]);
+format_error({return_mismatch,{F,A}}) ->
+ io_lib:format("return count mismatch in ~w/~w", [F,A]);
+format_error({arg_mismatch,{F,A}}) ->
+ io_lib:format("argument count mismatch in ~w/~w", [F,A]);
+format_error({unbound_var,N,{F,A}}) ->
+ io_lib:format("unbound variable ~s in ~w/~w", [N,F,A]);
+format_error({duplicate_var,N,{F,A}}) ->
+ io_lib:format("duplicate variable ~s in ~w/~w", [N,F,A]);
+format_error({not_var,{F,A}}) ->
+ io_lib:format("expecting variable in ~w/~w", [F,A]);
+format_error({not_pattern,{F,A}}) ->
+ io_lib:format("expecting pattern in ~w/~w", [F,A]);
+format_error({not_bs_pattern,{F,A}}) ->
+ io_lib:format("expecting bit syntax pattern in ~w/~w", [F,A]).
+
+%% module(CoreMod) ->
+%% module(CoreMod, [CompileOption]) ->
+%% {ok,[Warning]} | {error,[Error],[Warning]}
+
+module(M) -> module(M, []).
+
+module(#c_module{name=M,exports=Es,attrs=As,defs=Ds}, _Opts) ->
+ Defined = defined_funcs(Ds),
+ St0 = #lint{module=M#c_atom.val},
+ St1 = check_exports(Es, St0),
+ St2 = check_attrs(As, St1),
+ St3 = module_defs(Ds, Defined, St2),
+ St4 = check_state(Es, Defined, St3),
+ return_status(St4).
+
+%% defined_funcs([FuncDef]) -> [Fname].
+
+defined_funcs(Fs) ->
+ foldl(fun (#c_def{name=#c_fname{id=I,arity=A}}, Def) ->
+ add_element({I,A}, Def)
+ end, [], Fs).
+
+%% return_status(State) ->
+%% {ok,[Warning]} | {error,[Error],[Warning]}
+%% Pack errors and warnings properly and return ok | error.
+
+return_status(St) ->
+ Ws = reverse(St#lint.warnings),
+ case reverse(St#lint.errors) of
+ [] -> {ok,[{St#lint.module,Ws}]};
+ Es -> {error,[{St#lint.module,Es}],[{St#lint.module,Ws}]}
+ end.
+
+%% add_error(ErrorDescriptor, State) -> State'
+%% add_warning(ErrorDescriptor, State) -> State'
+%% Note that we don't use line numbers here.
+
+add_error(E, St) -> St#lint{errors=[{none,core_lint,E}|St#lint.errors]}.
+
+%%add_warning(W, St) -> St#lint{warnings=[{none,core_lint,W}|St#lint.warnings]}.
+
+check_exports(Es, St) ->
+ case all(fun (#c_fname{id=Name,arity=Arity}) when
+ atom(Name), integer(Arity) -> true;
+ (_) -> false
+ end, Es) of
+ true -> St;
+ false -> add_error(invalid_exports, St)
+ end.
+
+check_attrs(As, St) ->
+ case all(fun (#c_def{name=#c_atom{},val=V}) -> core_lib:is_literal(V);
+ (_) -> false
+ end, As) of
+ true -> St;
+ false -> add_error(invalid_attributes, St)
+ end.
+
+check_state(Es, Defined, St) ->
+ foldl(fun (#c_fname{id=N,arity=A}, St1) ->
+ F = {N,A},
+ case is_element(F, Defined) of
+ true -> St1;
+ false -> add_error({undefined_function,F}, St)
+ end
+ end, St, Es).
+% Undef = subtract(Es, Defined),
+% St1 = foldl(fun (F, St) -> add_error({undefined_function,F}, St) end,
+% St0, Undef),
+% St1.
+
+%% module_defs(CoreBody, Defined, State) -> State.
+
+module_defs(B, Def, St) ->
+ %% Set top level function name.
+ foldl(fun (Func, St0) ->
+ #c_fname{id=F,arity=A} = Func#c_def.name,
+ St1 = St0#lint{func={F,A}},
+ function(Func, Def, St1)
+ end, St, B).
+
+%% functions([Fdef], Defined, State) -> State.
+
+functions(Fs, Def, St0) ->
+ foldl(fun (F, St) -> function(F, Def, St) end, St0, Fs).
+
+%% function(CoreFunc, Defined, State) -> State.
+
+function(#c_def{name=#c_fname{},val=B}, Def, St) ->
+ %% Body must be a fun!
+ case B of
+ #c_fun{} -> expr(B, Def, any, St);
+ _ -> add_error({illegal_expr,St#lint.func}, St)
+ end.
+
+%% body(Expr, Defined, RetCount, State) -> State.
+
+body(#c_values{es=Es}, Def, Rt, St) ->
+ return_match(Rt, length(Es), expr_list(Es, Def, St));
+body(E, Def, Rt, St0) ->
+ St1 = expr(E, Def, Rt, St0),
+ case core_lib:is_simple_top(E) of
+ true -> return_match(Rt, 1, St1);
+ false -> St1
+ end.
+
+%% guard(Expr, Defined, State) -> State.
+%% Guards are boolean expressions with tests wrapped in a protected expression.
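+%%
+%%  "Protected" refers to the Core Erlang form matched by the #c_try{}
+%%  clause of gexpr/4 below; roughly (an editorial sketch, not compiler
+%%  output):
+%%
+%%      try Test of <X> -> X catch <_,_,_> -> 'false' end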
+
+guard(Expr, Def, St) -> gexpr(Expr, Def, 1, St).
+
+%% guard_list([Expr], Defined, State) -> State.
+
+%% guard_list(Es, Def, St0) ->
+%% foldl(fun (E, St) -> guard(E, Def, St) end, St0, Es).
+
+%% gbody(Expr, Defined, RetCount, State) -> State.
+
+gbody(#c_values{es=Es}, Def, Rt, St) ->
+ return_match(Rt, length(Es), gexpr_list(Es, Def, St));
+gbody(E, Def, Rt, St0) ->
+ St1 = gexpr(E, Def, Rt, St0),
+ case core_lib:is_simple_top(E) of
+ true -> return_match(Rt, 1, St1);
+ false -> St1
+ end.
+
+gexpr(#c_var{name=N}, Def, _Rt, St) -> expr_var(N, Def, St);
+gexpr(#c_int{}, _Def, _Rt, St) -> St;
+gexpr(#c_float{}, _Def, _Rt, St) -> St;
+gexpr(#c_atom{}, _Def, _Rt, St) -> St;
+gexpr(#c_char{}, _Def, _Rt, St) -> St;
+gexpr(#c_string{}, _Def, _Rt, St) -> St;
+gexpr(#c_nil{}, _Def, _Rt, St) -> St;
+gexpr(#c_cons{hd=H,tl=T}, Def, _Rt, St) ->
+ gexpr_list([H,T], Def, St);
+gexpr(#c_tuple{es=Es}, Def, _Rt, St) ->
+ gexpr_list(Es, Def, St);
+gexpr(#c_binary{segments=Ss}, Def, _Rt, St) ->
+ gbitstr_list(Ss, Def, St);
+gexpr(#c_seq{arg=Arg,body=B}, Def, Rt, St0) ->
+ St1 = gexpr(Arg, Def, any, St0), %Ignore values
+ gbody(B, Def, Rt, St1);
+gexpr(#c_let{vars=Vs,arg=Arg,body=B}, Def, Rt, St0) ->
+ St1 = gbody(Arg, Def, let_varcount(Vs), St0), %This is a guard body
+ {Lvs,St2} = variable_list(Vs, St1),
+ gbody(B, union(Lvs, Def), Rt, St2);
+gexpr(#c_call{module=#c_atom{val=erlang},
+ name=#c_atom{},
+ args=As}, Def, 1, St) ->
+ gexpr_list(As, Def, St);
+gexpr(#c_primop{name=N,args=As}, Def, _Rt, St0) when record(N, c_atom) ->
+ gexpr_list(As, Def, St0);
+gexpr(#c_try{arg=E,vars=[#c_var{name=X}],body=#c_var{name=X},
+ evars=[#c_var{},#c_var{},#c_var{}],handler=#c_atom{val=false}},
+ Def, Rt, St) ->
+ gbody(E, Def, Rt, St);
+gexpr(_, _, _, St) ->
+ add_error({illegal_guard,St#lint.func}, St).
+
+%% gexpr_list([Expr], Defined, State) -> State.
+
+gexpr_list(Es, Def, St0) ->
+ foldl(fun (E, St) -> gexpr(E, Def, 1, St) end, St0, Es).
+
+%% gbitstr_list([Elem], Defined, State) -> State.
+
+gbitstr_list(Es, Def, St0) ->
+ foldl(fun (E, St) -> gbitstr(E, Def, St) end, St0, Es).
+
+gbitstr(#c_bitstr{val=V,size=S,unit=U,type=T,flags=Fs}, Def, St0) ->
+ St1 = bit_type(U, T, Fs, St0),
+ gexpr_list([V,S], Def, St1).
+
+%% expr(Expr, Defined, RetCount, State) -> State.
+
+expr(#c_var{name=N}, Def, _Rt, St) -> expr_var(N, Def, St);
+expr(#c_int{}, _Def, _Rt, St) -> St;
+expr(#c_float{}, _Def, _Rt, St) -> St;
+expr(#c_atom{}, _Def, _Rt, St) -> St;
+expr(#c_char{}, _Def, _Rt, St) -> St;
+expr(#c_string{}, _Def, _Rt, St) -> St;
+expr(#c_nil{}, _Def, _Rt, St) -> St;
+expr(#c_cons{hd=H,tl=T}, Def, _Rt, St) ->
+ expr_list([H,T], Def, St);
+expr(#c_tuple{es=Es}, Def, _Rt, St) ->
+ expr_list(Es, Def, St);
+expr(#c_binary{segments=Ss}, Def, _Rt, St) ->
+ bitstr_list(Ss, Def, St);
+expr(#c_fname{id=I,arity=A}, Def, _Rt, St) ->
+ expr_fname({I,A}, Def, St);
+expr(#c_fun{vars=Vs,body=B}, Def, Rt, St0) ->
+ {Vvs,St1} = variable_list(Vs, St0),
+ return_match(Rt, 1, body(B, union(Vvs, Def), any, St1));
+expr(#c_seq{arg=Arg,body=B}, Def, Rt, St0) ->
+ St1 = expr(Arg, Def, any, St0), %Ignore values
+ body(B, Def, Rt, St1);
+expr(#c_let{vars=Vs,arg=Arg,body=B}, Def, Rt, St0) ->
+ St1 = body(Arg, Def, let_varcount(Vs), St0), %This is a body
+ {Lvs,St2} = variable_list(Vs, St1),
+ body(B, union(Lvs, Def), Rt, St2);
+expr(#c_letrec{defs=Fs,body=B}, Def0, Rt, St0) ->
+ Def1 = union(defined_funcs(Fs), Def0), %All defined stuff
+ St1 = functions(Fs, Def1, St0),
+ body(B, Def1, Rt, St1#lint{func=St0#lint.func});
+expr(#c_case{arg=Arg,clauses=Cs}, Def, Rt, St0) ->
+ Pc = case_patcount(Cs),
+ St1 = body(Arg, Def, Pc, St0),
+ clauses(Cs, Def, Pc, Rt, St1);
+expr(#c_receive{clauses=Cs,timeout=T,action=A}, Def, Rt, St0) ->
+ St1 = expr(T, Def, 1, St0),
+ St2 = body(A, Def, Rt, St1),
+ clauses(Cs, Def, 1, Rt, St2);
+expr(#c_apply{op=Op,args=As}, Def, _Rt, St0) ->
+ St1 = apply_op(Op, Def, length(As), St0),
+ expr_list(As, Def, St1);
+expr(#c_call{module=M,name=N,args=As}, Def, _Rt, St0) ->
+ St1 = expr(M, Def, 1, St0),
+ St2 = expr(N, Def, 1, St1),
+ expr_list(As, Def, St2);
+expr(#c_primop{name=N,args=As}, Def, _Rt, St0) when record(N, c_atom) ->
+ expr_list(As, Def, St0);
+expr(#c_catch{body=B}, Def, Rt, St) ->
+ return_match(Rt, 1, body(B, Def, 1, St));
+expr(#c_try{arg=A,vars=Vs,body=B,evars=Evs,handler=H}, Def, Rt, St0) ->
+ St1 = case length(Evs) of
+ 2 -> St0;
+ _ -> add_error({illegal_try,St0#lint.func}, St0)
+ end,
+ St2 = body(A, Def, let_varcount(Vs), St1),
+ {Ns,St3} = variable_list(Vs, St2),
+ St4 = body(B, union(Ns, Def), Rt, St3),
+ {Ens,St5} = variable_list(Evs, St4),
+ body(H, union(Ens, Def), Rt, St5);
+expr(_, _, _, St) ->
+ %%io:fwrite("clint: ~p~n", [Other]),
+ add_error({illegal_expr,St#lint.func}, St).
+
+%% expr_list([Expr], Defined, State) -> State.
+
+expr_list(Es, Def, St0) ->
+ foldl(fun (E, St) -> expr(E, Def, 1, St) end, St0, Es).
+
+%% bitstr_list([Elem], Defined, State) -> State.
+
+bitstr_list(Es, Def, St0) ->
+ foldl(fun (E, St) -> bitstr(E, Def, St) end, St0, Es).
+
+bitstr(#c_bitstr{val=V,size=S,unit=U,type=T,flags=Fs}, Def, St0) ->
+ St1 = bit_type(U, T, Fs, St0),
+ expr_list([V,S], Def, St1).
+
+%% apply_op(Op, Defined, ArgCount, State) -> State.
+%% An apply op is either an fname or an expression.
+
+apply_op(#c_fname{id=I,arity=A}, Def, Ac, St0) ->
+ St1 = expr_fname({I,A}, Def, St0),
+ arg_match(Ac, A, St1);
+apply_op(E, Def, _, St) -> expr(E, Def, 1, St). %Hard to check
+
+%% expr_var(VarName, Defined, State) -> State.
+
+expr_var(N, Def, St) ->
+ case is_element(N, Def) of
+ true -> St;
+ false -> add_error({unbound_var,N,St#lint.func}, St)
+ end.
+
+%% expr_fname(Fname, Defined, State) -> State.
+
+expr_fname(Fname, Def, St) ->
+ case is_element(Fname, Def) of
+ true -> St;
+ false -> add_error({undefined_function,Fname,St#lint.func}, St)
+ end.
+
+%% let_varcount([Var]) -> int().
+
+let_varcount([]) -> any; %Ignore values
+let_varcount(Es) -> length(Es).
+
+%% case_patcount([Clause]) -> int().
+
+case_patcount([#c_clause{pats=Ps}|_]) -> length(Ps).
+
+%% clauses([Clause], Defined, PatCount, RetCount, State) -> State.
+
+clauses(Cs, Def, Pc, Rt, St0) ->
+ foldl(fun (C, St) -> clause(C, Def, Pc, Rt, St) end, St0, Cs).
+
+%% clause(Clause, Defined, PatCount, RetCount, State) -> State.
+
+clause(#c_clause{pats=Ps,guard=G,body=B}, Def0, Pc, Rt, St0) ->
+ St1 = pattern_match(Pc, length(Ps), St0),
+ {Pvs,St2} = pattern_list(Ps, Def0, St1),
+ Def1 = union(Pvs, Def0),
+ St3 = guard(G, Def1, St2),
+ body(B, Def1, Rt, St3).
+
+%% variable(Var, [PatVar], State) -> {[VarName],State}.
+
+variable(#c_var{name=N}, Ps, St) ->
+ case is_element(N, Ps) of
+ true -> {[],add_error({duplicate_var,N,St#lint.func}, St)};
+ false -> {[N],St}
+ end;
+variable(_, Def, St) -> {Def,add_error({not_var,St#lint.func}, St)}.
+
+%% variable_list([Var], State) -> {[Var],State}.
+%% variable_list([Var], [PatVar], State) -> {[Var],State}.
+
+variable_list(Vs, St) -> variable_list(Vs, [], St).
+
+variable_list(Vs, Ps, St) ->
+ foldl(fun (V, {Ps0,St0}) ->
+ {Vvs,St1} = variable(V, Ps0, St0),
+ {union(Vvs, Ps0),St1}
+ end, {Ps,St}, Vs).
+
+%% pattern(Pattern, Defined, State) -> {[PatVar],State}.
+%% pattern(Pattern, Defined, [PatVar], State) -> {[PatVar],State}.
+%% Patterns are complicated by sizes in binaries. These are pure
+%% input variables which create no bindings. We therefore need to
+%% carry around the original defined variables to get the correct
+%% handling.
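+%%
+%%  A small illustration (an editorial sketch, in Erlang-level notation):
+%%  for a segment pattern <<X:Sz>>, the size Sz must already be bound, so
+%%  it is checked against Def, while X is a fresh pattern variable and is
+%%  added to the accumulated [PatVar] set Ps.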
+
+%% pattern(P, Def, St) -> pattern(P, Def, [], St).
+
+pattern(#c_var{name=N}, Def, Ps, St) ->
+ pat_var(N, Def, Ps, St);
+pattern(#c_int{}, _Def, Ps, St) -> {Ps,St};
+pattern(#c_float{}, _Def, Ps, St) -> {Ps,St};
+pattern(#c_atom{}, _Def, Ps, St) -> {Ps,St};
+pattern(#c_char{}, _Def, Ps, St) -> {Ps,St};
+pattern(#c_string{}, _Def, Ps, St) -> {Ps,St};
+pattern(#c_nil{}, _Def, Ps, St) -> {Ps,St};
+pattern(#c_cons{hd=H,tl=T}, Def, Ps, St) ->
+ pattern_list([H,T], Def, Ps, St);
+pattern(#c_tuple{es=Es}, Def, Ps, St) ->
+ pattern_list(Es, Def, Ps, St);
+pattern(#c_binary{segments=Ss}, Def, Ps, St) ->
+ pat_bin(Ss, Def, Ps, St);
+pattern(#c_alias{var=V,pat=P}, Def, Ps, St0) ->
+ {Vvs,St1} = variable(V, Ps, St0),
+ pattern(P, Def, union(Vvs, Ps), St1);
+pattern(_, _, Ps, St) -> {Ps,add_error({not_pattern,St#lint.func}, St)}.
+
+pat_var(N, _Def, Ps, St) ->
+ case is_element(N, Ps) of
+ true -> {Ps,add_error({duplicate_var,N,St#lint.func}, St)};
+ false -> {add_element(N, Ps),St}
+ end.
+
+%% pat_bin([Elem], Defined, [PatVar], State) -> {[PatVar],State}.
+
+pat_bin(Es, Def, Ps0, St0) ->
+ foldl(fun (E, {Ps,St}) -> pat_segment(E, Def, Ps, St) end, {Ps0,St0}, Es).
+
+pat_segment(#c_bitstr{val=V,size=S,unit=U,type=T,flags=Fs}, Def, Ps, St0) ->
+ St1 = bit_type(U, T, Fs, St0),
+ St2 = pat_bit_expr(S, T, Def, St1),
+ pattern(V, Def, Ps, St2);
+pat_segment(_, _, Ps, St) ->
+ {Ps,add_error({not_bs_pattern,St#lint.func}, St)}.
+
+%% pat_bit_expr(SizePat, Type, Defined, State) -> State.
+%% Check the Size pattern, this is an input! Be a bit tough here.
+
+pat_bit_expr(#c_int{val=I}, _, _, St) when I >= 0 -> St;
+pat_bit_expr(#c_var{name=N}, _, Def, St) ->
+ expr_var(N, Def, St);
+pat_bit_expr(#c_atom{val=all}, binary, _Def, St) -> St;
+pat_bit_expr(_, _, _, St) ->
+ add_error({illegal_expr,St#lint.func}, St).
+
+bit_type(Unit, Type, Flags, St) ->
+ U = core_lib:literal_value(Unit),
+ T = core_lib:literal_value(Type),
+ Fs = core_lib:literal_value(Flags),
+ case erl_bits:set_bit_type(default, [T,{unit,U}|Fs]) of
+ {ok,_,_} -> St;
+ {error,E} -> add_error({E,St#lint.func}, St)
+ end.
+
+%% pattern_list([Var], Defined, State) -> {[PatVar],State}.
+%% pattern_list([Var], Defined, [PatVar], State) -> {[PatVar],State}.
+
+pattern_list(Pats, Def, St) -> pattern_list(Pats, Def, [], St).
+
+pattern_list(Pats, Def, Ps0, St0) ->
+ foldl(fun (P, {Ps,St}) -> pattern(P, Def, Ps, St) end, {Ps0,St0}, Pats).
+
+%% pattern_match(Required, Supplied, State) -> State.
+%% Check that the required number of patterns match the supplied.
+
+pattern_match(N, N, St) -> St;
+pattern_match(_Req, _Sup, St) ->
+ add_error({pattern_mismatch,St#lint.func}, St).
+
+%% return_match(Required, Supplied, State) -> State.
+%% Check that the required number of return values match the supplied.
+
+return_match(any, _Sup, St) -> St;
+return_match(_Req, unknown, St) -> St;
+return_match(N, N, St) -> St;
+return_match(_Req, _Sup, St) ->
+ add_error({return_mismatch,St#lint.func}, St).
+
+%% arg_match(Required, Supplied, State) -> State.
+
+arg_match(_Req, unknown, St) -> St;
+arg_match(N, N, St) -> St;
+arg_match(_Req, _Sup, St) ->
+ add_error({arg_mismatch,St#lint.func}, St).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.erl
new file mode 100644
index 0000000000..942845bef7
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.erl
@@ -0,0 +1,4911 @@
+-module(core_parse).
+-define(THIS_MODULE, core_parse).
+-export([parse/1, parse_and_scan/1, format_error/1]).
+
+-export([abstract/1,abstract/2,normalise/1]).
+
+%% The following directive is needed for (significantly) faster compilation
+%% of the generated .erl file by the HiPE compiler. Please do not remove.
+-compile([{hipe,[{regalloc,linear_scan}]}]).
+
+-include("core_parse.hrl").
+
+tok_val(T) -> element(3, T).
+tok_line(T) -> element(2, T).
+
+abstract(T, _N) -> abstract(T).
+
+abstract(Term) -> core_lib:make_literal(Term).
+
+normalise(Core) -> core_lib:literal_value(Core).
+
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: core_parse.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% The parser generator will insert appropriate declarations before this line.%
+
+parse(Tokens) ->
+ case catch yeccpars1(Tokens, false, 0, [], []) of
+ error ->
+ Errorline =
+ if Tokens == [] -> 0; true -> element(2, hd(Tokens)) end,
+ {error,
+ {Errorline, ?THIS_MODULE, "syntax error at or after this line."}};
+ Other ->
+ Other
+ end.
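+
+%% A hedged usage sketch (core_scan, from the same historical compiler
+%% sources, is assumed to be available for tokenising Core Erlang text):
+%%
+%%   {ok, Tokens, _EndLine} = core_scan:string(CoreText),
+%%   {ok, ModuleDef} = core_parse:parse(Tokens).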
+
+parse_and_scan({Mod, Fun, Args}) ->
+ case apply(Mod, Fun, Args) of
+ {eof, _} ->
+ {ok, eof};
+ {error, Descriptor, _} ->
+ {error, Descriptor};
+ {ok, Tokens, _} ->
+ yeccpars1(Tokens, {Mod, Fun, Args}, 0, [], [])
+ end.
+
+format_error(Message) ->
+ case io_lib:deep_char_list(Message) of
+ true ->
+ Message;
+ _ ->
+ io_lib:write(Message)
+ end.
+
+% To be used in grammar files to throw an error message to the parser toplevel.
+% Doesn't have to be exported!
+return_error(Line, Message) ->
+ throw({error, {Line, ?THIS_MODULE, Message}}).
+
+
+% Don't change yeccpars1/6 too much, it is called recursively by yeccpars2/8!
+yeccpars1([Token | Tokens], Tokenizer, State, States, Vstack) ->
+ yeccpars2(State, element(1, Token), States, Vstack, Token, Tokens,
+ Tokenizer);
+yeccpars1([], {M, F, A}, State, States, Vstack) ->
+ case catch apply(M, F, A) of
+ {eof, Endline} ->
+ {error, {Endline, ?THIS_MODULE, "end_of_file"}};
+ {error, Descriptor, _Endline} ->
+ {error, Descriptor};
+ {'EXIT', Reason} ->
+ {error, {0, ?THIS_MODULE, Reason}};
+ {ok, Tokens, _Endline} ->
+ case catch yeccpars1(Tokens, {M, F, A}, State, States, Vstack) of
+ error ->
+ Errorline = element(2, hd(Tokens)),
+ {error, {Errorline, ?THIS_MODULE,
+ "syntax error at or after this line."}};
+ Other ->
+ Other
+ end
+ end;
+yeccpars1([], false, State, States, Vstack) ->
+ yeccpars2(State, '$end', States, Vstack, {'$end', 999999}, [], false).
+
+% For internal use only.
+yeccerror(Token) ->
+ {error,
+ {element(2, Token), ?THIS_MODULE,
+ ["syntax error before: ", yecctoken2string(Token)]}}.
+
+yecctoken2string({atom, _, A}) -> io_lib:write(A);
+yecctoken2string({integer,_,N}) -> io_lib:write(N);
+yecctoken2string({float,_,F}) -> io_lib:write(F);
+yecctoken2string({char,_,C}) -> io_lib:write_char(C);
+yecctoken2string({var,_,V}) -> io_lib:format('~s', [V]);
+yecctoken2string({string,_,S}) -> io_lib:write_string(S);
+yecctoken2string({reserved_symbol, _, A}) -> io_lib:format('~w', [A]);
+yecctoken2string({_Cat, _, Val}) -> io_lib:format('~w', [Val]);
+
+yecctoken2string({'dot', _}) -> io_lib:format('~w', ['.']);
+yecctoken2string({'$end', _}) ->
+ [];
+yecctoken2string({Other, _}) when atom(Other) ->
+ io_lib:format('~w', [Other]);
+yecctoken2string(Other) ->
+ io_lib:write(Other).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+yeccpars2(0, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 1, [0 | __Ss], [__T | __Stack]);
+yeccpars2(0, 'module', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 2, [0 | __Ss], [__T | __Stack]);
+yeccpars2(0, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(1, 'module', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 313, [1 | __Ss], [__T | __Stack]);
+yeccpars2(1, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(2, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 4, [2 | __Ss], [__T | __Stack]);
+yeccpars2(2, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(3, '$end', _, __Stack, _, _, _) ->
+ {ok, hd(__Stack)};
+yeccpars2(3, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(4, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 5, [4 | __Ss], [__T | __Stack]);
+yeccpars2(4, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(5, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [5 | __Ss], [__T | __Stack]);
+yeccpars2(5, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 306, [5 | __Ss], [__T | __Stack]);
+yeccpars2(5, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(6, 'attributes', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 7, [6 | __Ss], [__T | __Stack]);
+yeccpars2(6, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(7, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 276, [7 | __Ss], [__T | __Stack]);
+yeccpars2(7, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(8, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 9, [8 | __Ss], [__T | __Stack]);
+yeccpars2(8, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [8 | __Ss], [__T | __Stack]);
+yeccpars2(8, __Cat, __Ss, __Stack, __T, __Ts, __Tzr) ->
+ __Val = [],
+ yeccpars2(13, __Cat, [8 | __Ss], [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(9, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [9 | __Ss], [__T | __Stack]);
+yeccpars2(9, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(10, '=', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 20, [10 | __Ss], [__T | __Stack]);
+yeccpars2(10, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(11, '/', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 18, [11 | __Ss], [__T | __Stack]);
+yeccpars2(11, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(12, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 9, [12 | __Ss], [__T | __Stack]);
+yeccpars2(12, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [12 | __Ss], [__T | __Stack]);
+yeccpars2(12, __Cat, __Ss, __Stack, __T, __Ts, __Tzr) ->
+ __Val = [],
+ yeccpars2(17, __Cat, [12 | __Ss], [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(13, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(module_defs, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(14, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_function_name, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(15, 'end', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 16, [15 | __Ss], [__T | __Stack]);
+yeccpars2(15, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(16, __Cat, __Ss, [__6,__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_module{name = #c_atom{val = tok_val(__2)}, exports = __3, attrs = __4, defs = __5},
+ __Nss = lists:nthtail(5, __Ss),
+ yeccpars2(yeccgoto(module_definition, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(17, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__2],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(function_definitions, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(18, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 19, [18 | __Ss], [__T | __Stack]);
+yeccpars2(18, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(19, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_fname{id = tok_val(__1), arity = tok_val(__3)},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(function_name, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(20, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [20 | __Ss], [__T | __Stack]);
+yeccpars2(20, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 21, [20 | __Ss], [__T | __Stack]);
+yeccpars2(20, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(21, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [21 | __Ss], [__T | __Stack]);
+yeccpars2(21, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(22, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_def{name = __1, val = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(function_definition, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(23, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 25, [23 | __Ss], [__T | __Stack]);
+yeccpars2(23, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(24, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_fun, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(25, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 27, [25 | __Ss], [__T | __Stack]);
+yeccpars2(25, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [25 | __Ss], [__T | __Stack]);
+yeccpars2(25, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [25 | __Ss], [__T | __Stack]);
+yeccpars2(25, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(26, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [26 | __Ss], [__T | __Stack]);
+yeccpars2(26, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(27, '->', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 265, [27 | __Ss], [__T | __Stack]);
+yeccpars2(27, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(28, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 263, [28 | __Ss], [__T | __Stack]);
+yeccpars2(28, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(anno_variables, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(29, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 32, [29 | __Ss], [__T | __Stack]);
+yeccpars2(29, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(30, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_var{name = tok_val(__1)},
+ yeccpars2(yeccgoto(variable, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(31, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_variable, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(32, '->', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 33, [32 | __Ss], [__T | __Stack]);
+yeccpars2(32, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(33, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [33 | __Ss], [__T | __Stack]);
+yeccpars2(33, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(34, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 247, [34 | __Ss], [__T | __Stack]);
+yeccpars2(34, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(35, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [35 | __Ss], [__T | __Stack]);
+yeccpars2(35, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(36, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 240, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [36 | __Ss], [__T | __Stack]);
+yeccpars2(36, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(37, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 149, [37 | __Ss], [__T | __Stack]);
+yeccpars2(37, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(38, __Cat, __Ss, [__6,__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_fun{vars = __3, body = __6},
+ __Nss = lists:nthtail(5, __Ss),
+ yeccpars2(yeccgoto(fun_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(39, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(40, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [40 | __Ss], [__T | __Stack]);
+yeccpars2(40, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(41, '/', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 18, [41 | __Ss], [__T | __Stack]);
+yeccpars2(41, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_atom{val = tok_val(__1)},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(42, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(43, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(44, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [44 | __Ss], [__T | __Stack]);
+yeccpars2(44, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(45, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(46, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [46 | __Ss], [__T | __Stack]);
+yeccpars2(46, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(47, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(48, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [48 | __Ss], [__T | __Stack]);
+yeccpars2(48, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(49, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(50, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_char{val = tok_val(__1)},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(51, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(52, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [52 | __Ss], [__T | __Stack]);
+yeccpars2(52, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(53, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(54, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_float{val = tok_val(__1)},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(55, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(56, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(57, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_int{val = tok_val(__1)},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(58, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 83, [58 | __Ss], [__T | __Stack]);
+yeccpars2(58, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [58 | __Ss], [__T | __Stack]);
+yeccpars2(58, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [58 | __Ss], [__T | __Stack]);
+yeccpars2(58, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(59, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(60, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 9, [60 | __Ss], [__T | __Stack]);
+yeccpars2(60, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [60 | __Ss], [__T | __Stack]);
+yeccpars2(60, __Cat, __Ss, __Stack, __T, __Ts, __Tzr) ->
+ __Val = [],
+ yeccpars2(210, __Cat, [60 | __Ss], [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(61, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(62, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_nil{},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(63, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 208, [63 | __Ss], [__T | __Stack]);
+yeccpars2(63, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(64, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(65, 'after', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 99, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 97, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 96, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [65 | __Ss], [__T | __Stack]);
+yeccpars2(65, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(66, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(67, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(68, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(69, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_string{val = tok_val(__1)},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(70, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [70 | __Ss], [__T | __Stack]);
+yeccpars2(70, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(71, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(72, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(73, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(single_expression, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(74, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 77, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [74 | __Ss], [__T | __Stack]);
+yeccpars2(74, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(75, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 79, [75 | __Ss], [__T | __Stack]);
+yeccpars2(75, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(anno_expressions, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(76, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 78, [76 | __Ss], [__T | __Stack]);
+yeccpars2(76, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(77, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_tuple{es = []},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(tuple, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(78, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_tuple{es = __2},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tuple, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(79, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [79 | __Ss], [__T | __Stack]);
+yeccpars2(79, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(80, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(anno_expressions, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(81, 'of', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 82, [81 | __Ss], [__T | __Stack]);
+yeccpars2(81, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(82, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 83, [82 | __Ss], [__T | __Stack]);
+yeccpars2(82, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [82 | __Ss], [__T | __Stack]);
+yeccpars2(82, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [82 | __Ss], [__T | __Stack]);
+yeccpars2(82, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(83, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 92, [83 | __Ss], [__T | __Stack]);
+yeccpars2(83, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [83 | __Ss], [__T | __Stack]);
+yeccpars2(83, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [83 | __Ss], [__T | __Stack]);
+yeccpars2(83, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(84, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(let_vars, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(85, '->', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 86, [85 | __Ss], [__T | __Stack]);
+yeccpars2(85, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(86, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [86 | __Ss], [__T | __Stack]);
+yeccpars2(86, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(87, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 88, [87 | __Ss], [__T | __Stack]);
+yeccpars2(87, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(88, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 83, [88 | __Ss], [__T | __Stack]);
+yeccpars2(88, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [88 | __Ss], [__T | __Stack]);
+yeccpars2(88, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [88 | __Ss], [__T | __Stack]);
+yeccpars2(88, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(89, '->', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 90, [89 | __Ss], [__T | __Stack]);
+yeccpars2(89, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(90, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [90 | __Ss], [__T | __Stack]);
+yeccpars2(90, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(91, __Cat, __Ss, [__10,__9,__8,__7,__6,__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = if length(__8) == 2 -> #c_try{arg = __2, vars = __4, body = __6, evars = __8, handler = __10}; true -> return_error(tok_line(__7),"expected 2 exception variables in 'try'") end,
+ __Nss = lists:nthtail(9, __Ss),
+ yeccpars2(yeccgoto(try_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(92, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(let_vars, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(93, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 94, [93 | __Ss], [__T | __Stack]);
+yeccpars2(93, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(94, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(let_vars, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(95, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 190, [95 | __Ss], [__T | __Stack]);
+yeccpars2(95, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(96, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 97, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [96 | __Ss], [__T | __Stack]);
+yeccpars2(96, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(97, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 182, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [97 | __Ss], [__T | __Stack]);
+yeccpars2(97, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(98, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 149, [98 | __Ss], [__T | __Stack]);
+yeccpars2(98, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(99, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [99 | __Ss], [__T | __Stack]);
+yeccpars2(99, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(100, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 97, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 96, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [100 | __Ss], [__T | __Stack]);
+yeccpars2(100, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(anno_clauses, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(101, 'after', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 99, [101 | __Ss], [__T | __Stack]);
+yeccpars2(101, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(102, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(clause_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(103, '=', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 162, [103 | __Ss], [__T | __Stack]);
+yeccpars2(103, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(104, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_atom{val = tok_val(__1)},
+ yeccpars2(yeccgoto(atomic_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(105, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(atomic_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(106, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(other_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(107, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(other_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(108, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_clause, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(109, 'when', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 164, [109 | __Ss], [__T | __Stack]);
+yeccpars2(109, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(110, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(other_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(111, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(112, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = begin
+ {T,A} = __2, #c_receive{clauses = [], timeout = T, action = A}
+ end,
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(receive_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(113, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(other_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(114, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 118, [114 | __Ss], [__T | __Stack]);
+yeccpars2(114, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(115, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [115 | __Ss], [__T | __Stack]);
+yeccpars2(115, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(116, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 120, [116 | __Ss], [__T | __Stack]);
+yeccpars2(116, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(anno_patterns, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(117, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 119, [117 | __Ss], [__T | __Stack]);
+yeccpars2(117, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(118, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_tuple{es = []},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(tuple_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(119, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_tuple{es = __2},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tuple_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(120, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [120 | __Ss], [__T | __Stack]);
+yeccpars2(120, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(121, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(anno_patterns, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(122, '=', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 162, [122 | __Ss], [__T | __Stack]);
+yeccpars2(122, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(123, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 159, [123 | __Ss], [__T | __Stack]);
+yeccpars2(123, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(124, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 125, [124 | __Ss], [__T | __Stack]);
+yeccpars2(124, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_variable, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(125, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [125 | __Ss], [__T | __Stack]);
+yeccpars2(125, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(126, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 129, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 142, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 140, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 131, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 137, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 138, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 133, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 130, [126 | __Ss], [__T | __Stack]);
+yeccpars2(126, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(127, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 128, [127 | __Ss], [__T | __Stack]);
+yeccpars2(127, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(128, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = core_lib:set_anno(__2,__4),
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(anno_variable, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(129, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 129, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 142, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 140, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 131, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 137, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 138, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 133, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 149, [129 | __Ss], [__T | __Stack]);
+yeccpars2(129, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(130, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(annotation, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(131, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = tok_val(__1),
+ yeccpars2(yeccgoto(atomic_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(132, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(133, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = tok_val(__1),
+ yeccpars2(yeccgoto(atomic_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(134, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(135, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 147, [135 | __Ss], [__T | __Stack]);
+yeccpars2(135, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(constants, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(136, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 146, [136 | __Ss], [__T | __Stack]);
+yeccpars2(136, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(137, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = tok_val(__1),
+ yeccpars2(yeccgoto(atomic_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(138, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = tok_val(__1),
+ yeccpars2(yeccgoto(atomic_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(139, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ yeccpars2(yeccgoto(atomic_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(140, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = tok_val(__1),
+ yeccpars2(yeccgoto(atomic_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(141, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(142, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 129, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 142, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 144, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 140, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 131, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 137, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 138, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 133, [142 | __Ss], [__T | __Stack]);
+yeccpars2(142, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(143, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 145, [143 | __Ss], [__T | __Stack]);
+yeccpars2(143, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(144, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = {},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(tuple_constant, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(145, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = list_to_tuple(__2),
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tuple_constant, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(146, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(annotation, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(147, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 129, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 142, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 140, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 131, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 137, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 138, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 133, [147 | __Ss], [__T | __Stack]);
+yeccpars2(147, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(148, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(constants, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(149, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = {nil,tok_line(__1)},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(nil, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(150, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 151, [150 | __Ss], [__T | __Stack]);
+yeccpars2(150, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 154, [150 | __Ss], [__T | __Stack]);
+yeccpars2(150, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 152, [150 | __Ss], [__T | __Stack]);
+yeccpars2(150, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(151, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 129, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 142, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 140, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 131, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 137, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 138, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 133, [151 | __Ss], [__T | __Stack]);
+yeccpars2(151, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(152, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ yeccpars2(yeccgoto(tail_constant, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(153, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__2|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(cons_constant, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(154, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 129, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 142, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 140, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 131, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 137, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 138, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 133, [154 | __Ss], [__T | __Stack]);
+yeccpars2(154, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(155, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 156, [155 | __Ss], [__T | __Stack]);
+yeccpars2(155, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(156, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail_constant, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(157, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 151, [157 | __Ss], [__T | __Stack]);
+yeccpars2(157, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 154, [157 | __Ss], [__T | __Stack]);
+yeccpars2(157, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 152, [157 | __Ss], [__T | __Stack]);
+yeccpars2(157, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(158, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__2|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail_constant, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(159, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [159 | __Ss], [__T | __Stack]);
+yeccpars2(159, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(160, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 161, [160 | __Ss], [__T | __Stack]);
+yeccpars2(160, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(161, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = core_lib:set_anno(__2,__4),
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(anno_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(162, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [162 | __Ss], [__T | __Stack]);
+yeccpars2(162, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(163, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_alias{var = __1, pat = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(other_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(164, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [164 | __Ss], [__T | __Stack]);
+yeccpars2(164, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(165, '->', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 166, [165 | __Ss], [__T | __Stack]);
+yeccpars2(165, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(166, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [166 | __Ss], [__T | __Stack]);
+yeccpars2(166, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(167, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_clause{pats = __1, guard = __3, body = __5},
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(clause, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(168, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = begin
+ {T,A} = __3, #c_receive{clauses = __2, timeout = T, action = A}
+ end,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(receive_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(169, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__2],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(anno_clauses, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(170, '->', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 171, [170 | __Ss], [__T | __Stack]);
+yeccpars2(170, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(171, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [171 | __Ss], [__T | __Stack]);
+yeccpars2(171, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(172, __Cat, __Ss, [__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = {__2,__4},
+ __Nss = lists:nthtail(3, __Ss),
+ yeccpars2(yeccgoto(timeout, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(173, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 174, [173 | __Ss], [__T | __Stack]);
+yeccpars2(173, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 177, [173 | __Ss], [__T | __Stack]);
+yeccpars2(173, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 175, [173 | __Ss], [__T | __Stack]);
+yeccpars2(173, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(174, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [174 | __Ss], [__T | __Stack]);
+yeccpars2(174, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(175, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_nil{},
+ yeccpars2(yeccgoto(tail_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(176, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_cons{hd = __2, tl = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(cons_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(177, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [177 | __Ss], [__T | __Stack]);
+yeccpars2(177, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(178, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 179, [178 | __Ss], [__T | __Stack]);
+yeccpars2(178, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(179, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(180, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 174, [180 | __Ss], [__T | __Stack]);
+yeccpars2(180, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 177, [180 | __Ss], [__T | __Stack]);
+yeccpars2(180, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 175, [180 | __Ss], [__T | __Stack]);
+yeccpars2(180, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(181, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_cons{hd = __2, tl = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(182, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(clause_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(183, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 184, [183 | __Ss], [__T | __Stack]);
+yeccpars2(183, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(184, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(clause_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(185, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 187, [185 | __Ss], [__T | __Stack]);
+yeccpars2(185, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(186, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 159, [186 | __Ss], [__T | __Stack]);
+yeccpars2(186, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(anno_pattern, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(187, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [187 | __Ss], [__T | __Stack]);
+yeccpars2(187, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(188, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 189, [188 | __Ss], [__T | __Stack]);
+yeccpars2(188, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(189, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = core_lib:set_anno(__2,__4),
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(anno_clause, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(190, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 191, [190 | __Ss], [__T | __Stack]);
+yeccpars2(190, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 194, [190 | __Ss], [__T | __Stack]);
+yeccpars2(190, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(191, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 200, [191 | __Ss], [__T | __Stack]);
+yeccpars2(191, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(192, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 198, [192 | __Ss], [__T | __Stack]);
+yeccpars2(192, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(segment_patterns, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(193, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 196, [193 | __Ss], [__T | __Stack]);
+yeccpars2(193, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(194, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 195, [194 | __Ss], [__T | __Stack]);
+yeccpars2(194, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(195, __Cat, __Ss, [__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_binary{segments = []},
+ __Nss = lists:nthtail(3, __Ss),
+ yeccpars2(yeccgoto(binary_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(196, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 197, [196 | __Ss], [__T | __Stack]);
+yeccpars2(196, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(197, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_binary{segments = __3},
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(binary_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(198, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 191, [198 | __Ss], [__T | __Stack]);
+yeccpars2(198, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(199, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(segment_patterns, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(200, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 115, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [200 | __Ss], [__T | __Stack]);
+yeccpars2(200, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(201, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 202, [201 | __Ss], [__T | __Stack]);
+yeccpars2(201, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(202, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 203, [202 | __Ss], [__T | __Stack]);
+yeccpars2(202, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(203, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 205, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [203 | __Ss], [__T | __Stack]);
+yeccpars2(203, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(204, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = case __5 of [S,U,T,Fs] -> #c_bitstr{val = __3, size = S, unit = U, type = T, flags = Fs}; true -> return_error(tok_line(__1),"expected 4 arguments in binary segment") end,
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(segment_pattern, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(205, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(arg_list, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(206, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 207, [206 | __Ss], [__T | __Stack]);
+yeccpars2(206, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(207, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(arg_list, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(208, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 203, [208 | __Ss], [__T | __Stack]);
+yeccpars2(208, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(209, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = begin
+ Name = #c_atom{val = tok_val(__2)}, #c_primop{name = Name, args = __3}
+ end,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(primop_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(210, 'in', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 211, [210 | __Ss], [__T | __Stack]);
+yeccpars2(210, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(211, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [211 | __Ss], [__T | __Stack]);
+yeccpars2(211, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(212, __Cat, __Ss, [__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_letrec{defs = __2, body = __4},
+ __Nss = lists:nthtail(3, __Ss),
+ yeccpars2(yeccgoto(letrec_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(213, '=', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 214, [213 | __Ss], [__T | __Stack]);
+yeccpars2(213, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(214, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [214 | __Ss], [__T | __Stack]);
+yeccpars2(214, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(215, 'in', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 216, [215 | __Ss], [__T | __Stack]);
+yeccpars2(215, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(216, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [216 | __Ss], [__T | __Stack]);
+yeccpars2(216, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(217, __Cat, __Ss, [__6,__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_let{vars = __2, arg = __4, body = __6},
+ __Nss = lists:nthtail(5, __Ss),
+ yeccpars2(yeccgoto(let_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(218, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [218 | __Ss], [__T | __Stack]);
+yeccpars2(218, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(219, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_seq{arg = __2, body = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(sequence, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(220, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_catch{body = __2},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(catch_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(221, 'of', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 222, [221 | __Ss], [__T | __Stack]);
+yeccpars2(221, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(222, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 97, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 96, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 95, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 98, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 114, [222 | __Ss], [__T | __Stack]);
+yeccpars2(222, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(223, 'end', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 224, [223 | __Ss], [__T | __Stack]);
+yeccpars2(223, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(224, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_case{arg = __2, clauses = __4},
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(case_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(225, ':', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 226, [225 | __Ss], [__T | __Stack]);
+yeccpars2(225, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(226, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [226 | __Ss], [__T | __Stack]);
+yeccpars2(226, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(227, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 203, [227 | __Ss], [__T | __Stack]);
+yeccpars2(227, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(228, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_call{module = __2, name = __4, args = __5},
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(call_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(229, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 203, [229 | __Ss], [__T | __Stack]);
+yeccpars2(229, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(230, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_apply{op = __2, args = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(application_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(231, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 232, [231 | __Ss], [__T | __Stack]);
+yeccpars2(231, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 235, [231 | __Ss], [__T | __Stack]);
+yeccpars2(231, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 233, [231 | __Ss], [__T | __Stack]);
+yeccpars2(231, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(232, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [232 | __Ss], [__T | __Stack]);
+yeccpars2(232, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(233, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_nil{},
+ yeccpars2(yeccgoto(tail, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(234, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_cons{hd = __2, tl = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(cons, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(235, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [235 | __Ss], [__T | __Stack]);
+yeccpars2(235, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(236, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 237, [236 | __Ss], [__T | __Stack]);
+yeccpars2(236, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(237, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(238, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 232, [238 | __Ss], [__T | __Stack]);
+yeccpars2(238, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 235, [238 | __Ss], [__T | __Stack]);
+yeccpars2(238, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 233, [238 | __Ss], [__T | __Stack]);
+yeccpars2(238, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(239, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_cons{hd = __2, tl = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(240, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_values{es = []},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(expression, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(241, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 242, [241 | __Ss], [__T | __Stack]);
+yeccpars2(241, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(242, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_values{es = __2},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(expression, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(243, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 244, [243 | __Ss], [__T | __Stack]);
+yeccpars2(243, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(244, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [244 | __Ss], [__T | __Stack]);
+yeccpars2(244, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(245, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 246, [245 | __Ss], [__T | __Stack]);
+yeccpars2(245, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(246, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = core_lib:set_anno(__2,__4),
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(anno_expression, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(247, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 248, [247 | __Ss], [__T | __Stack]);
+yeccpars2(247, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 251, [247 | __Ss], [__T | __Stack]);
+yeccpars2(247, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(248, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 257, [248 | __Ss], [__T | __Stack]);
+yeccpars2(248, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(249, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 255, [249 | __Ss], [__T | __Stack]);
+yeccpars2(249, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(segments, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(250, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 253, [250 | __Ss], [__T | __Stack]);
+yeccpars2(250, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(251, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 252, [251 | __Ss], [__T | __Stack]);
+yeccpars2(251, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(252, __Cat, __Ss, [__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_binary{segments = []},
+ __Nss = lists:nthtail(3, __Ss),
+ yeccpars2(yeccgoto(binary, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(253, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 254, [253 | __Ss], [__T | __Stack]);
+yeccpars2(253, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(254, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_binary{segments = __3},
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(binary, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(255, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 248, [255 | __Ss], [__T | __Stack]);
+yeccpars2(255, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(256, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(segments, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(257, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [257 | __Ss], [__T | __Stack]);
+yeccpars2(257, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(258, '>', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 259, [258 | __Ss], [__T | __Stack]);
+yeccpars2(258, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(259, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 260, [259 | __Ss], [__T | __Stack]);
+yeccpars2(259, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(260, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [260 | __Ss], [__T | __Stack]);
+yeccpars2(260, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(261, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 262, [261 | __Ss], [__T | __Stack]);
+yeccpars2(261, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(262, __Cat, __Ss, [__7,__6,__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = case __6 of [S,U,T,Fs] -> #c_bitstr{val = __3, size = S, unit = U, type = T, flags = Fs}; true -> return_error(tok_line(__1),"expected 4 arguments in binary segment") end,
+ __Nss = lists:nthtail(6, __Ss),
+ yeccpars2(yeccgoto(segment, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(263, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 26, [263 | __Ss], [__T | __Stack]);
+yeccpars2(263, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [263 | __Ss], [__T | __Stack]);
+yeccpars2(263, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(264, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(anno_variables, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(265, 'receive', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 65, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'catch', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 48, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'try', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 70, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'primop', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 63, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'call', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 44, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'apply', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 40, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'case', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 46, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'letrec', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 60, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'let', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 58, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'fun', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 23, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'do', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 52, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 41, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, '#', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 34, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 37, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 74, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, '<', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 36, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 35, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, 'var', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 30, [265 | __Ss], [__T | __Stack]);
+yeccpars2(265, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(266, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_fun{vars = [], body = __5},
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(fun_expr, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(267, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 125, [267 | __Ss], [__T | __Stack]);
+yeccpars2(267, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(268, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 269, [268 | __Ss], [__T | __Stack]);
+yeccpars2(268, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(269, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [269 | __Ss], [__T | __Stack]);
+yeccpars2(269, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(270, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 271, [270 | __Ss], [__T | __Stack]);
+yeccpars2(270, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(271, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = core_lib:set_anno(__2,__4),
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(anno_fun, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(272, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 273, [272 | __Ss], [__T | __Stack]);
+yeccpars2(272, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(273, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [273 | __Ss], [__T | __Stack]);
+yeccpars2(273, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(274, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 275, [274 | __Ss], [__T | __Stack]);
+yeccpars2(274, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(275, __Cat, __Ss, [__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = core_lib:set_anno(__2,__4),
+ __Nss = lists:nthtail(4, __Ss),
+ yeccpars2(yeccgoto(anno_function_name, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(276, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 278, [276 | __Ss], [__T | __Stack]);
+yeccpars2(276, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 277, [276 | __Ss], [__T | __Stack]);
+yeccpars2(276, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(277, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(module_attribute, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(278, '=', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 284, [278 | __Ss], [__T | __Stack]);
+yeccpars2(278, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(279, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 282, [279 | __Ss], [__T | __Stack]);
+yeccpars2(279, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(attribute_list, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(280, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 281, [280 | __Ss], [__T | __Stack]);
+yeccpars2(280, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(281, __Cat, __Ss, [__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __3,
+ __Nss = lists:nthtail(3, __Ss),
+ yeccpars2(yeccgoto(module_attribute, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(282, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 278, [282 | __Ss], [__T | __Stack]);
+yeccpars2(282, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(283, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(attribute_list, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(284, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 285, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 290, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [284 | __Ss], [__T | __Stack]);
+yeccpars2(284, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(285, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 285, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 290, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 149, [285 | __Ss], [__T | __Stack]);
+yeccpars2(285, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(286, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(287, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(288, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_def{name = #c_atom{val = tok_val(__1)}, val = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(attribute, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(289, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(290, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 285, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 290, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 293, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [290 | __Ss], [__T | __Stack]);
+yeccpars2(290, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(291, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 295, [291 | __Ss], [__T | __Stack]);
+yeccpars2(291, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(literals, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(292, '}', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 294, [292 | __Ss], [__T | __Stack]);
+yeccpars2(292, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(293, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_tuple{es = []},
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(tuple_literal, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(294, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_tuple{es = __2},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tuple_literal, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(295, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 285, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 290, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [295 | __Ss], [__T | __Stack]);
+yeccpars2(295, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(296, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(literals, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(297, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 298, [297 | __Ss], [__T | __Stack]);
+yeccpars2(297, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 301, [297 | __Ss], [__T | __Stack]);
+yeccpars2(297, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 299, [297 | __Ss], [__T | __Stack]);
+yeccpars2(297, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(298, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 285, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 290, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [298 | __Ss], [__T | __Stack]);
+yeccpars2(298, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(299, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_nil{},
+ yeccpars2(yeccgoto(tail_literal, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(300, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_cons{hd = __2, tl = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(cons_literal, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(301, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 285, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, '{', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 290, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, 'string', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 69, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 104, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, 'float', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 54, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, 'integer', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 57, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, 'char', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 50, [301 | __Ss], [__T | __Stack]);
+yeccpars2(301, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(302, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 303, [302 | __Ss], [__T | __Stack]);
+yeccpars2(302, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(303, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail_literal, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(304, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 298, [304 | __Ss], [__T | __Stack]);
+yeccpars2(304, '|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 301, [304 | __Ss], [__T | __Stack]);
+yeccpars2(304, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 299, [304 | __Ss], [__T | __Stack]);
+yeccpars2(304, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(305, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_cons{hd = __2, tl = __3},
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(tail_literal, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(306, __Cat, __Ss, [__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [],
+ __Nss = lists:nthtail(1, __Ss),
+ yeccpars2(yeccgoto(module_export, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(307, ',', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 311, [307 | __Ss], [__T | __Stack]);
+yeccpars2(307, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1],
+ yeccpars2(yeccgoto(exported_names, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(308, ']', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 310, [308 | __Ss], [__T | __Stack]);
+yeccpars2(308, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(309, __Cat, __Ss, [__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __1,
+ yeccpars2(yeccgoto(exported_name, hd(__Ss)), __Cat, __Ss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(310, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = __2,
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(module_export, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(311, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [311 | __Ss], [__T | __Stack]);
+yeccpars2(311, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(312, __Cat, __Ss, [__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = [__1|__3],
+ __Nss = lists:nthtail(2, __Ss),
+ yeccpars2(yeccgoto(exported_names, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(313, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 314, [313 | __Ss], [__T | __Stack]);
+yeccpars2(313, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(314, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 5, [314 | __Ss], [__T | __Stack]);
+yeccpars2(314, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(315, 'attributes', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 7, [315 | __Ss], [__T | __Stack]);
+yeccpars2(315, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(316, '(', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 9, [316 | __Ss], [__T | __Stack]);
+yeccpars2(316, 'atom', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 11, [316 | __Ss], [__T | __Stack]);
+yeccpars2(316, __Cat, __Ss, __Stack, __T, __Ts, __Tzr) ->
+ __Val = [],
+ yeccpars2(13, __Cat, [316 | __Ss], [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(317, 'end', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 318, [317 | __Ss], [__T | __Stack]);
+yeccpars2(317, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(318, '-|', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 319, [318 | __Ss], [__T | __Stack]);
+yeccpars2(318, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(319, '[', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 126, [319 | __Ss], [__T | __Stack]);
+yeccpars2(319, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(320, ')', __Ss, __Stack, __T, __Ts, __Tzr) ->
+ yeccpars1(__Ts, __Tzr, 321, [320 | __Ss], [__T | __Stack]);
+yeccpars2(320, _, _, _, __T, _, _) ->
+ yeccerror(__T);
+yeccpars2(321, __Cat, __Ss, [__10,__9,__8,__7,__6,__5,__4,__3,__2,__1|__Stack], __T, __Ts, __Tzr) ->
+ __Val = #c_module{anno = __9, name = tok_val(__3), exports = __4, attrs = __5, defs = __6},
+ __Nss = lists:nthtail(9, __Ss),
+ yeccpars2(yeccgoto(module_definition, hd(__Nss)), __Cat, __Nss, [__Val | __Stack], __T, __Ts, __Tzr);
+yeccpars2(__Other, _, _, _, _, _, _) ->
+ exit({parser, __Other, missing_state_in_action_table}).
+
+yeccgoto(anno_clause, 65) ->
+ 100;
+yeccgoto(anno_clause, 100) ->
+ 100;
+yeccgoto(anno_clause, 222) ->
+ 100;
+yeccgoto(anno_clauses, 65) ->
+ 101;
+yeccgoto(anno_clauses, 100) ->
+ 169;
+yeccgoto(anno_clauses, 222) ->
+ 223;
+yeccgoto(anno_expression, 33) ->
+ 38;
+yeccgoto(anno_expression, 36) ->
+ 75;
+yeccgoto(anno_expression, 37) ->
+ 231;
+yeccgoto(anno_expression, 40) ->
+ 229;
+yeccgoto(anno_expression, 44) ->
+ 225;
+yeccgoto(anno_expression, 46) ->
+ 221;
+yeccgoto(anno_expression, 48) ->
+ 220;
+yeccgoto(anno_expression, 52) ->
+ 218;
+yeccgoto(anno_expression, 70) ->
+ 81;
+yeccgoto(anno_expression, 74) ->
+ 75;
+yeccgoto(anno_expression, 79) ->
+ 75;
+yeccgoto(anno_expression, 86) ->
+ 87;
+yeccgoto(anno_expression, 90) ->
+ 91;
+yeccgoto(anno_expression, 99) ->
+ 170;
+yeccgoto(anno_expression, 164) ->
+ 165;
+yeccgoto(anno_expression, 166) ->
+ 167;
+yeccgoto(anno_expression, 171) ->
+ 172;
+yeccgoto(anno_expression, 203) ->
+ 75;
+yeccgoto(anno_expression, 211) ->
+ 212;
+yeccgoto(anno_expression, 214) ->
+ 215;
+yeccgoto(anno_expression, 216) ->
+ 217;
+yeccgoto(anno_expression, 218) ->
+ 219;
+yeccgoto(anno_expression, 226) ->
+ 227;
+yeccgoto(anno_expression, 232) ->
+ 238;
+yeccgoto(anno_expression, 235) ->
+ 236;
+yeccgoto(anno_expression, 257) ->
+ 258;
+yeccgoto(anno_expression, 260) ->
+ 75;
+yeccgoto(anno_expression, 265) ->
+ 266;
+yeccgoto(anno_expressions, 36) ->
+ 241;
+yeccgoto(anno_expressions, 74) ->
+ 76;
+yeccgoto(anno_expressions, 79) ->
+ 80;
+yeccgoto(anno_expressions, 203) ->
+ 206;
+yeccgoto(anno_expressions, 260) ->
+ 261;
+yeccgoto(anno_fun, 20) ->
+ 22;
+yeccgoto(anno_function_name, 8) ->
+ 10;
+yeccgoto(anno_function_name, 12) ->
+ 10;
+yeccgoto(anno_function_name, 60) ->
+ 10;
+yeccgoto(anno_function_name, 316) ->
+ 10;
+yeccgoto(anno_pattern, 65) ->
+ 102;
+yeccgoto(anno_pattern, 96) ->
+ 102;
+yeccgoto(anno_pattern, 97) ->
+ 116;
+yeccgoto(anno_pattern, 98) ->
+ 173;
+yeccgoto(anno_pattern, 100) ->
+ 102;
+yeccgoto(anno_pattern, 114) ->
+ 116;
+yeccgoto(anno_pattern, 120) ->
+ 116;
+yeccgoto(anno_pattern, 162) ->
+ 163;
+yeccgoto(anno_pattern, 174) ->
+ 180;
+yeccgoto(anno_pattern, 177) ->
+ 178;
+yeccgoto(anno_pattern, 200) ->
+ 201;
+yeccgoto(anno_pattern, 222) ->
+ 102;
+yeccgoto(anno_patterns, 97) ->
+ 183;
+yeccgoto(anno_patterns, 114) ->
+ 117;
+yeccgoto(anno_patterns, 120) ->
+ 121;
+yeccgoto(anno_variable, 25) ->
+ 28;
+yeccgoto(anno_variable, 58) ->
+ 84;
+yeccgoto(anno_variable, 65) ->
+ 103;
+yeccgoto(anno_variable, 82) ->
+ 84;
+yeccgoto(anno_variable, 83) ->
+ 28;
+yeccgoto(anno_variable, 88) ->
+ 84;
+yeccgoto(anno_variable, 96) ->
+ 103;
+yeccgoto(anno_variable, 97) ->
+ 103;
+yeccgoto(anno_variable, 98) ->
+ 103;
+yeccgoto(anno_variable, 100) ->
+ 103;
+yeccgoto(anno_variable, 114) ->
+ 103;
+yeccgoto(anno_variable, 115) ->
+ 122;
+yeccgoto(anno_variable, 120) ->
+ 103;
+yeccgoto(anno_variable, 162) ->
+ 103;
+yeccgoto(anno_variable, 174) ->
+ 103;
+yeccgoto(anno_variable, 177) ->
+ 103;
+yeccgoto(anno_variable, 200) ->
+ 103;
+yeccgoto(anno_variable, 222) ->
+ 103;
+yeccgoto(anno_variable, 263) ->
+ 28;
+yeccgoto(anno_variables, 25) ->
+ 29;
+yeccgoto(anno_variables, 83) ->
+ 93;
+yeccgoto(anno_variables, 263) ->
+ 264;
+yeccgoto(annotation, 125) ->
+ 127;
+yeccgoto(annotation, 159) ->
+ 160;
+yeccgoto(annotation, 187) ->
+ 188;
+yeccgoto(annotation, 244) ->
+ 245;
+yeccgoto(annotation, 269) ->
+ 270;
+yeccgoto(annotation, 273) ->
+ 274;
+yeccgoto(annotation, 319) ->
+ 320;
+yeccgoto(application_expr, 33) ->
+ 39;
+yeccgoto(application_expr, 35) ->
+ 39;
+yeccgoto(application_expr, 36) ->
+ 39;
+yeccgoto(application_expr, 37) ->
+ 39;
+yeccgoto(application_expr, 40) ->
+ 39;
+yeccgoto(application_expr, 44) ->
+ 39;
+yeccgoto(application_expr, 46) ->
+ 39;
+yeccgoto(application_expr, 48) ->
+ 39;
+yeccgoto(application_expr, 52) ->
+ 39;
+yeccgoto(application_expr, 70) ->
+ 39;
+yeccgoto(application_expr, 74) ->
+ 39;
+yeccgoto(application_expr, 79) ->
+ 39;
+yeccgoto(application_expr, 86) ->
+ 39;
+yeccgoto(application_expr, 90) ->
+ 39;
+yeccgoto(application_expr, 99) ->
+ 39;
+yeccgoto(application_expr, 164) ->
+ 39;
+yeccgoto(application_expr, 166) ->
+ 39;
+yeccgoto(application_expr, 171) ->
+ 39;
+yeccgoto(application_expr, 203) ->
+ 39;
+yeccgoto(application_expr, 211) ->
+ 39;
+yeccgoto(application_expr, 214) ->
+ 39;
+yeccgoto(application_expr, 216) ->
+ 39;
+yeccgoto(application_expr, 218) ->
+ 39;
+yeccgoto(application_expr, 226) ->
+ 39;
+yeccgoto(application_expr, 232) ->
+ 39;
+yeccgoto(application_expr, 235) ->
+ 39;
+yeccgoto(application_expr, 257) ->
+ 39;
+yeccgoto(application_expr, 260) ->
+ 39;
+yeccgoto(application_expr, 265) ->
+ 39;
+yeccgoto(arg_list, 202) ->
+ 204;
+yeccgoto(arg_list, 208) ->
+ 209;
+yeccgoto(arg_list, 227) ->
+ 228;
+yeccgoto(arg_list, 229) ->
+ 230;
+yeccgoto(atomic_constant, 126) ->
+ 132;
+yeccgoto(atomic_constant, 129) ->
+ 132;
+yeccgoto(atomic_constant, 142) ->
+ 132;
+yeccgoto(atomic_constant, 147) ->
+ 132;
+yeccgoto(atomic_constant, 151) ->
+ 132;
+yeccgoto(atomic_constant, 154) ->
+ 132;
+yeccgoto(atomic_literal, 33) ->
+ 42;
+yeccgoto(atomic_literal, 35) ->
+ 42;
+yeccgoto(atomic_literal, 36) ->
+ 42;
+yeccgoto(atomic_literal, 37) ->
+ 42;
+yeccgoto(atomic_literal, 40) ->
+ 42;
+yeccgoto(atomic_literal, 44) ->
+ 42;
+yeccgoto(atomic_literal, 46) ->
+ 42;
+yeccgoto(atomic_literal, 48) ->
+ 42;
+yeccgoto(atomic_literal, 52) ->
+ 42;
+yeccgoto(atomic_literal, 65) ->
+ 105;
+yeccgoto(atomic_literal, 70) ->
+ 42;
+yeccgoto(atomic_literal, 74) ->
+ 42;
+yeccgoto(atomic_literal, 79) ->
+ 42;
+yeccgoto(atomic_literal, 86) ->
+ 42;
+yeccgoto(atomic_literal, 90) ->
+ 42;
+yeccgoto(atomic_literal, 96) ->
+ 105;
+yeccgoto(atomic_literal, 97) ->
+ 105;
+yeccgoto(atomic_literal, 98) ->
+ 105;
+yeccgoto(atomic_literal, 99) ->
+ 42;
+yeccgoto(atomic_literal, 100) ->
+ 105;
+yeccgoto(atomic_literal, 114) ->
+ 105;
+yeccgoto(atomic_literal, 115) ->
+ 105;
+yeccgoto(atomic_literal, 120) ->
+ 105;
+yeccgoto(atomic_literal, 162) ->
+ 105;
+yeccgoto(atomic_literal, 164) ->
+ 42;
+yeccgoto(atomic_literal, 166) ->
+ 42;
+yeccgoto(atomic_literal, 171) ->
+ 42;
+yeccgoto(atomic_literal, 174) ->
+ 105;
+yeccgoto(atomic_literal, 177) ->
+ 105;
+yeccgoto(atomic_literal, 200) ->
+ 105;
+yeccgoto(atomic_literal, 203) ->
+ 42;
+yeccgoto(atomic_literal, 211) ->
+ 42;
+yeccgoto(atomic_literal, 214) ->
+ 42;
+yeccgoto(atomic_literal, 216) ->
+ 42;
+yeccgoto(atomic_literal, 218) ->
+ 42;
+yeccgoto(atomic_literal, 222) ->
+ 105;
+yeccgoto(atomic_literal, 226) ->
+ 42;
+yeccgoto(atomic_literal, 232) ->
+ 42;
+yeccgoto(atomic_literal, 235) ->
+ 42;
+yeccgoto(atomic_literal, 257) ->
+ 42;
+yeccgoto(atomic_literal, 260) ->
+ 42;
+yeccgoto(atomic_literal, 265) ->
+ 42;
+yeccgoto(atomic_literal, 284) ->
+ 286;
+yeccgoto(atomic_literal, 285) ->
+ 286;
+yeccgoto(atomic_literal, 290) ->
+ 286;
+yeccgoto(atomic_literal, 295) ->
+ 286;
+yeccgoto(atomic_literal, 298) ->
+ 286;
+yeccgoto(atomic_literal, 301) ->
+ 286;
+yeccgoto(atomic_pattern, 65) ->
+ 106;
+yeccgoto(atomic_pattern, 96) ->
+ 106;
+yeccgoto(atomic_pattern, 97) ->
+ 106;
+yeccgoto(atomic_pattern, 98) ->
+ 106;
+yeccgoto(atomic_pattern, 100) ->
+ 106;
+yeccgoto(atomic_pattern, 114) ->
+ 106;
+yeccgoto(atomic_pattern, 115) ->
+ 106;
+yeccgoto(atomic_pattern, 120) ->
+ 106;
+yeccgoto(atomic_pattern, 162) ->
+ 106;
+yeccgoto(atomic_pattern, 174) ->
+ 106;
+yeccgoto(atomic_pattern, 177) ->
+ 106;
+yeccgoto(atomic_pattern, 200) ->
+ 106;
+yeccgoto(atomic_pattern, 222) ->
+ 106;
+yeccgoto(attribute, 276) ->
+ 279;
+yeccgoto(attribute, 282) ->
+ 279;
+yeccgoto(attribute_list, 276) ->
+ 280;
+yeccgoto(attribute_list, 282) ->
+ 283;
+yeccgoto(binary, 33) ->
+ 43;
+yeccgoto(binary, 35) ->
+ 43;
+yeccgoto(binary, 36) ->
+ 43;
+yeccgoto(binary, 37) ->
+ 43;
+yeccgoto(binary, 40) ->
+ 43;
+yeccgoto(binary, 44) ->
+ 43;
+yeccgoto(binary, 46) ->
+ 43;
+yeccgoto(binary, 48) ->
+ 43;
+yeccgoto(binary, 52) ->
+ 43;
+yeccgoto(binary, 70) ->
+ 43;
+yeccgoto(binary, 74) ->
+ 43;
+yeccgoto(binary, 79) ->
+ 43;
+yeccgoto(binary, 86) ->
+ 43;
+yeccgoto(binary, 90) ->
+ 43;
+yeccgoto(binary, 99) ->
+ 43;
+yeccgoto(binary, 164) ->
+ 43;
+yeccgoto(binary, 166) ->
+ 43;
+yeccgoto(binary, 171) ->
+ 43;
+yeccgoto(binary, 203) ->
+ 43;
+yeccgoto(binary, 211) ->
+ 43;
+yeccgoto(binary, 214) ->
+ 43;
+yeccgoto(binary, 216) ->
+ 43;
+yeccgoto(binary, 218) ->
+ 43;
+yeccgoto(binary, 226) ->
+ 43;
+yeccgoto(binary, 232) ->
+ 43;
+yeccgoto(binary, 235) ->
+ 43;
+yeccgoto(binary, 257) ->
+ 43;
+yeccgoto(binary, 260) ->
+ 43;
+yeccgoto(binary, 265) ->
+ 43;
+yeccgoto(binary_pattern, 65) ->
+ 107;
+yeccgoto(binary_pattern, 96) ->
+ 107;
+yeccgoto(binary_pattern, 97) ->
+ 107;
+yeccgoto(binary_pattern, 98) ->
+ 107;
+yeccgoto(binary_pattern, 100) ->
+ 107;
+yeccgoto(binary_pattern, 114) ->
+ 107;
+yeccgoto(binary_pattern, 115) ->
+ 107;
+yeccgoto(binary_pattern, 120) ->
+ 107;
+yeccgoto(binary_pattern, 162) ->
+ 107;
+yeccgoto(binary_pattern, 174) ->
+ 107;
+yeccgoto(binary_pattern, 177) ->
+ 107;
+yeccgoto(binary_pattern, 200) ->
+ 107;
+yeccgoto(binary_pattern, 222) ->
+ 107;
+yeccgoto(call_expr, 33) ->
+ 45;
+yeccgoto(call_expr, 35) ->
+ 45;
+yeccgoto(call_expr, 36) ->
+ 45;
+yeccgoto(call_expr, 37) ->
+ 45;
+yeccgoto(call_expr, 40) ->
+ 45;
+yeccgoto(call_expr, 44) ->
+ 45;
+yeccgoto(call_expr, 46) ->
+ 45;
+yeccgoto(call_expr, 48) ->
+ 45;
+yeccgoto(call_expr, 52) ->
+ 45;
+yeccgoto(call_expr, 70) ->
+ 45;
+yeccgoto(call_expr, 74) ->
+ 45;
+yeccgoto(call_expr, 79) ->
+ 45;
+yeccgoto(call_expr, 86) ->
+ 45;
+yeccgoto(call_expr, 90) ->
+ 45;
+yeccgoto(call_expr, 99) ->
+ 45;
+yeccgoto(call_expr, 164) ->
+ 45;
+yeccgoto(call_expr, 166) ->
+ 45;
+yeccgoto(call_expr, 171) ->
+ 45;
+yeccgoto(call_expr, 203) ->
+ 45;
+yeccgoto(call_expr, 211) ->
+ 45;
+yeccgoto(call_expr, 214) ->
+ 45;
+yeccgoto(call_expr, 216) ->
+ 45;
+yeccgoto(call_expr, 218) ->
+ 45;
+yeccgoto(call_expr, 226) ->
+ 45;
+yeccgoto(call_expr, 232) ->
+ 45;
+yeccgoto(call_expr, 235) ->
+ 45;
+yeccgoto(call_expr, 257) ->
+ 45;
+yeccgoto(call_expr, 260) ->
+ 45;
+yeccgoto(call_expr, 265) ->
+ 45;
+yeccgoto(case_expr, 33) ->
+ 47;
+yeccgoto(case_expr, 35) ->
+ 47;
+yeccgoto(case_expr, 36) ->
+ 47;
+yeccgoto(case_expr, 37) ->
+ 47;
+yeccgoto(case_expr, 40) ->
+ 47;
+yeccgoto(case_expr, 44) ->
+ 47;
+yeccgoto(case_expr, 46) ->
+ 47;
+yeccgoto(case_expr, 48) ->
+ 47;
+yeccgoto(case_expr, 52) ->
+ 47;
+yeccgoto(case_expr, 70) ->
+ 47;
+yeccgoto(case_expr, 74) ->
+ 47;
+yeccgoto(case_expr, 79) ->
+ 47;
+yeccgoto(case_expr, 86) ->
+ 47;
+yeccgoto(case_expr, 90) ->
+ 47;
+yeccgoto(case_expr, 99) ->
+ 47;
+yeccgoto(case_expr, 164) ->
+ 47;
+yeccgoto(case_expr, 166) ->
+ 47;
+yeccgoto(case_expr, 171) ->
+ 47;
+yeccgoto(case_expr, 203) ->
+ 47;
+yeccgoto(case_expr, 211) ->
+ 47;
+yeccgoto(case_expr, 214) ->
+ 47;
+yeccgoto(case_expr, 216) ->
+ 47;
+yeccgoto(case_expr, 218) ->
+ 47;
+yeccgoto(case_expr, 226) ->
+ 47;
+yeccgoto(case_expr, 232) ->
+ 47;
+yeccgoto(case_expr, 235) ->
+ 47;
+yeccgoto(case_expr, 257) ->
+ 47;
+yeccgoto(case_expr, 260) ->
+ 47;
+yeccgoto(case_expr, 265) ->
+ 47;
+yeccgoto(catch_expr, 33) ->
+ 49;
+yeccgoto(catch_expr, 35) ->
+ 49;
+yeccgoto(catch_expr, 36) ->
+ 49;
+yeccgoto(catch_expr, 37) ->
+ 49;
+yeccgoto(catch_expr, 40) ->
+ 49;
+yeccgoto(catch_expr, 44) ->
+ 49;
+yeccgoto(catch_expr, 46) ->
+ 49;
+yeccgoto(catch_expr, 48) ->
+ 49;
+yeccgoto(catch_expr, 52) ->
+ 49;
+yeccgoto(catch_expr, 70) ->
+ 49;
+yeccgoto(catch_expr, 74) ->
+ 49;
+yeccgoto(catch_expr, 79) ->
+ 49;
+yeccgoto(catch_expr, 86) ->
+ 49;
+yeccgoto(catch_expr, 90) ->
+ 49;
+yeccgoto(catch_expr, 99) ->
+ 49;
+yeccgoto(catch_expr, 164) ->
+ 49;
+yeccgoto(catch_expr, 166) ->
+ 49;
+yeccgoto(catch_expr, 171) ->
+ 49;
+yeccgoto(catch_expr, 203) ->
+ 49;
+yeccgoto(catch_expr, 211) ->
+ 49;
+yeccgoto(catch_expr, 214) ->
+ 49;
+yeccgoto(catch_expr, 216) ->
+ 49;
+yeccgoto(catch_expr, 218) ->
+ 49;
+yeccgoto(catch_expr, 226) ->
+ 49;
+yeccgoto(catch_expr, 232) ->
+ 49;
+yeccgoto(catch_expr, 235) ->
+ 49;
+yeccgoto(catch_expr, 257) ->
+ 49;
+yeccgoto(catch_expr, 260) ->
+ 49;
+yeccgoto(catch_expr, 265) ->
+ 49;
+yeccgoto(clause, 65) ->
+ 108;
+yeccgoto(clause, 96) ->
+ 185;
+yeccgoto(clause, 100) ->
+ 108;
+yeccgoto(clause, 222) ->
+ 108;
+yeccgoto(clause_pattern, 65) ->
+ 109;
+yeccgoto(clause_pattern, 96) ->
+ 109;
+yeccgoto(clause_pattern, 100) ->
+ 109;
+yeccgoto(clause_pattern, 222) ->
+ 109;
+yeccgoto(cons, 33) ->
+ 51;
+yeccgoto(cons, 35) ->
+ 51;
+yeccgoto(cons, 36) ->
+ 51;
+yeccgoto(cons, 37) ->
+ 51;
+yeccgoto(cons, 40) ->
+ 51;
+yeccgoto(cons, 44) ->
+ 51;
+yeccgoto(cons, 46) ->
+ 51;
+yeccgoto(cons, 48) ->
+ 51;
+yeccgoto(cons, 52) ->
+ 51;
+yeccgoto(cons, 70) ->
+ 51;
+yeccgoto(cons, 74) ->
+ 51;
+yeccgoto(cons, 79) ->
+ 51;
+yeccgoto(cons, 86) ->
+ 51;
+yeccgoto(cons, 90) ->
+ 51;
+yeccgoto(cons, 99) ->
+ 51;
+yeccgoto(cons, 164) ->
+ 51;
+yeccgoto(cons, 166) ->
+ 51;
+yeccgoto(cons, 171) ->
+ 51;
+yeccgoto(cons, 203) ->
+ 51;
+yeccgoto(cons, 211) ->
+ 51;
+yeccgoto(cons, 214) ->
+ 51;
+yeccgoto(cons, 216) ->
+ 51;
+yeccgoto(cons, 218) ->
+ 51;
+yeccgoto(cons, 226) ->
+ 51;
+yeccgoto(cons, 232) ->
+ 51;
+yeccgoto(cons, 235) ->
+ 51;
+yeccgoto(cons, 257) ->
+ 51;
+yeccgoto(cons, 260) ->
+ 51;
+yeccgoto(cons, 265) ->
+ 51;
+yeccgoto(cons_constant, 126) ->
+ 134;
+yeccgoto(cons_constant, 129) ->
+ 134;
+yeccgoto(cons_constant, 142) ->
+ 134;
+yeccgoto(cons_constant, 147) ->
+ 134;
+yeccgoto(cons_constant, 151) ->
+ 134;
+yeccgoto(cons_constant, 154) ->
+ 134;
+yeccgoto(cons_literal, 284) ->
+ 287;
+yeccgoto(cons_literal, 285) ->
+ 287;
+yeccgoto(cons_literal, 290) ->
+ 287;
+yeccgoto(cons_literal, 295) ->
+ 287;
+yeccgoto(cons_literal, 298) ->
+ 287;
+yeccgoto(cons_literal, 301) ->
+ 287;
+yeccgoto(cons_pattern, 65) ->
+ 110;
+yeccgoto(cons_pattern, 96) ->
+ 110;
+yeccgoto(cons_pattern, 97) ->
+ 110;
+yeccgoto(cons_pattern, 98) ->
+ 110;
+yeccgoto(cons_pattern, 100) ->
+ 110;
+yeccgoto(cons_pattern, 114) ->
+ 110;
+yeccgoto(cons_pattern, 115) ->
+ 110;
+yeccgoto(cons_pattern, 120) ->
+ 110;
+yeccgoto(cons_pattern, 162) ->
+ 110;
+yeccgoto(cons_pattern, 174) ->
+ 110;
+yeccgoto(cons_pattern, 177) ->
+ 110;
+yeccgoto(cons_pattern, 200) ->
+ 110;
+yeccgoto(cons_pattern, 222) ->
+ 110;
+yeccgoto(constant, 126) ->
+ 135;
+yeccgoto(constant, 129) ->
+ 150;
+yeccgoto(constant, 142) ->
+ 135;
+yeccgoto(constant, 147) ->
+ 135;
+yeccgoto(constant, 151) ->
+ 157;
+yeccgoto(constant, 154) ->
+ 155;
+yeccgoto(constants, 126) ->
+ 136;
+yeccgoto(constants, 142) ->
+ 143;
+yeccgoto(constants, 147) ->
+ 148;
+yeccgoto(exported_name, 5) ->
+ 307;
+yeccgoto(exported_name, 311) ->
+ 307;
+yeccgoto(exported_names, 5) ->
+ 308;
+yeccgoto(exported_names, 311) ->
+ 312;
+yeccgoto(expression, 33) ->
+ 53;
+yeccgoto(expression, 35) ->
+ 243;
+yeccgoto(expression, 36) ->
+ 53;
+yeccgoto(expression, 37) ->
+ 53;
+yeccgoto(expression, 40) ->
+ 53;
+yeccgoto(expression, 44) ->
+ 53;
+yeccgoto(expression, 46) ->
+ 53;
+yeccgoto(expression, 48) ->
+ 53;
+yeccgoto(expression, 52) ->
+ 53;
+yeccgoto(expression, 70) ->
+ 53;
+yeccgoto(expression, 74) ->
+ 53;
+yeccgoto(expression, 79) ->
+ 53;
+yeccgoto(expression, 86) ->
+ 53;
+yeccgoto(expression, 90) ->
+ 53;
+yeccgoto(expression, 99) ->
+ 53;
+yeccgoto(expression, 164) ->
+ 53;
+yeccgoto(expression, 166) ->
+ 53;
+yeccgoto(expression, 171) ->
+ 53;
+yeccgoto(expression, 203) ->
+ 53;
+yeccgoto(expression, 211) ->
+ 53;
+yeccgoto(expression, 214) ->
+ 53;
+yeccgoto(expression, 216) ->
+ 53;
+yeccgoto(expression, 218) ->
+ 53;
+yeccgoto(expression, 226) ->
+ 53;
+yeccgoto(expression, 232) ->
+ 53;
+yeccgoto(expression, 235) ->
+ 53;
+yeccgoto(expression, 257) ->
+ 53;
+yeccgoto(expression, 260) ->
+ 53;
+yeccgoto(expression, 265) ->
+ 53;
+yeccgoto(fun_expr, 20) ->
+ 24;
+yeccgoto(fun_expr, 21) ->
+ 268;
+yeccgoto(fun_expr, 33) ->
+ 55;
+yeccgoto(fun_expr, 35) ->
+ 55;
+yeccgoto(fun_expr, 36) ->
+ 55;
+yeccgoto(fun_expr, 37) ->
+ 55;
+yeccgoto(fun_expr, 40) ->
+ 55;
+yeccgoto(fun_expr, 44) ->
+ 55;
+yeccgoto(fun_expr, 46) ->
+ 55;
+yeccgoto(fun_expr, 48) ->
+ 55;
+yeccgoto(fun_expr, 52) ->
+ 55;
+yeccgoto(fun_expr, 70) ->
+ 55;
+yeccgoto(fun_expr, 74) ->
+ 55;
+yeccgoto(fun_expr, 79) ->
+ 55;
+yeccgoto(fun_expr, 86) ->
+ 55;
+yeccgoto(fun_expr, 90) ->
+ 55;
+yeccgoto(fun_expr, 99) ->
+ 55;
+yeccgoto(fun_expr, 164) ->
+ 55;
+yeccgoto(fun_expr, 166) ->
+ 55;
+yeccgoto(fun_expr, 171) ->
+ 55;
+yeccgoto(fun_expr, 203) ->
+ 55;
+yeccgoto(fun_expr, 211) ->
+ 55;
+yeccgoto(fun_expr, 214) ->
+ 55;
+yeccgoto(fun_expr, 216) ->
+ 55;
+yeccgoto(fun_expr, 218) ->
+ 55;
+yeccgoto(fun_expr, 226) ->
+ 55;
+yeccgoto(fun_expr, 232) ->
+ 55;
+yeccgoto(fun_expr, 235) ->
+ 55;
+yeccgoto(fun_expr, 257) ->
+ 55;
+yeccgoto(fun_expr, 260) ->
+ 55;
+yeccgoto(fun_expr, 265) ->
+ 55;
+yeccgoto(function_definition, 8) ->
+ 12;
+yeccgoto(function_definition, 12) ->
+ 12;
+yeccgoto(function_definition, 60) ->
+ 12;
+yeccgoto(function_definition, 316) ->
+ 12;
+yeccgoto(function_definitions, 8) ->
+ 13;
+yeccgoto(function_definitions, 12) ->
+ 17;
+yeccgoto(function_definitions, 60) ->
+ 210;
+yeccgoto(function_definitions, 316) ->
+ 13;
+yeccgoto(function_name, 5) ->
+ 309;
+yeccgoto(function_name, 8) ->
+ 14;
+yeccgoto(function_name, 9) ->
+ 272;
+yeccgoto(function_name, 12) ->
+ 14;
+yeccgoto(function_name, 33) ->
+ 56;
+yeccgoto(function_name, 35) ->
+ 56;
+yeccgoto(function_name, 36) ->
+ 56;
+yeccgoto(function_name, 37) ->
+ 56;
+yeccgoto(function_name, 40) ->
+ 56;
+yeccgoto(function_name, 44) ->
+ 56;
+yeccgoto(function_name, 46) ->
+ 56;
+yeccgoto(function_name, 48) ->
+ 56;
+yeccgoto(function_name, 52) ->
+ 56;
+yeccgoto(function_name, 60) ->
+ 14;
+yeccgoto(function_name, 70) ->
+ 56;
+yeccgoto(function_name, 74) ->
+ 56;
+yeccgoto(function_name, 79) ->
+ 56;
+yeccgoto(function_name, 86) ->
+ 56;
+yeccgoto(function_name, 90) ->
+ 56;
+yeccgoto(function_name, 99) ->
+ 56;
+yeccgoto(function_name, 164) ->
+ 56;
+yeccgoto(function_name, 166) ->
+ 56;
+yeccgoto(function_name, 171) ->
+ 56;
+yeccgoto(function_name, 203) ->
+ 56;
+yeccgoto(function_name, 211) ->
+ 56;
+yeccgoto(function_name, 214) ->
+ 56;
+yeccgoto(function_name, 216) ->
+ 56;
+yeccgoto(function_name, 218) ->
+ 56;
+yeccgoto(function_name, 226) ->
+ 56;
+yeccgoto(function_name, 232) ->
+ 56;
+yeccgoto(function_name, 235) ->
+ 56;
+yeccgoto(function_name, 257) ->
+ 56;
+yeccgoto(function_name, 260) ->
+ 56;
+yeccgoto(function_name, 265) ->
+ 56;
+yeccgoto(function_name, 311) ->
+ 309;
+yeccgoto(function_name, 316) ->
+ 14;
+yeccgoto(let_expr, 33) ->
+ 59;
+yeccgoto(let_expr, 35) ->
+ 59;
+yeccgoto(let_expr, 36) ->
+ 59;
+yeccgoto(let_expr, 37) ->
+ 59;
+yeccgoto(let_expr, 40) ->
+ 59;
+yeccgoto(let_expr, 44) ->
+ 59;
+yeccgoto(let_expr, 46) ->
+ 59;
+yeccgoto(let_expr, 48) ->
+ 59;
+yeccgoto(let_expr, 52) ->
+ 59;
+yeccgoto(let_expr, 70) ->
+ 59;
+yeccgoto(let_expr, 74) ->
+ 59;
+yeccgoto(let_expr, 79) ->
+ 59;
+yeccgoto(let_expr, 86) ->
+ 59;
+yeccgoto(let_expr, 90) ->
+ 59;
+yeccgoto(let_expr, 99) ->
+ 59;
+yeccgoto(let_expr, 164) ->
+ 59;
+yeccgoto(let_expr, 166) ->
+ 59;
+yeccgoto(let_expr, 171) ->
+ 59;
+yeccgoto(let_expr, 203) ->
+ 59;
+yeccgoto(let_expr, 211) ->
+ 59;
+yeccgoto(let_expr, 214) ->
+ 59;
+yeccgoto(let_expr, 216) ->
+ 59;
+yeccgoto(let_expr, 218) ->
+ 59;
+yeccgoto(let_expr, 226) ->
+ 59;
+yeccgoto(let_expr, 232) ->
+ 59;
+yeccgoto(let_expr, 235) ->
+ 59;
+yeccgoto(let_expr, 257) ->
+ 59;
+yeccgoto(let_expr, 260) ->
+ 59;
+yeccgoto(let_expr, 265) ->
+ 59;
+yeccgoto(let_vars, 58) ->
+ 213;
+yeccgoto(let_vars, 82) ->
+ 85;
+yeccgoto(let_vars, 88) ->
+ 89;
+yeccgoto(letrec_expr, 33) ->
+ 61;
+yeccgoto(letrec_expr, 35) ->
+ 61;
+yeccgoto(letrec_expr, 36) ->
+ 61;
+yeccgoto(letrec_expr, 37) ->
+ 61;
+yeccgoto(letrec_expr, 40) ->
+ 61;
+yeccgoto(letrec_expr, 44) ->
+ 61;
+yeccgoto(letrec_expr, 46) ->
+ 61;
+yeccgoto(letrec_expr, 48) ->
+ 61;
+yeccgoto(letrec_expr, 52) ->
+ 61;
+yeccgoto(letrec_expr, 70) ->
+ 61;
+yeccgoto(letrec_expr, 74) ->
+ 61;
+yeccgoto(letrec_expr, 79) ->
+ 61;
+yeccgoto(letrec_expr, 86) ->
+ 61;
+yeccgoto(letrec_expr, 90) ->
+ 61;
+yeccgoto(letrec_expr, 99) ->
+ 61;
+yeccgoto(letrec_expr, 164) ->
+ 61;
+yeccgoto(letrec_expr, 166) ->
+ 61;
+yeccgoto(letrec_expr, 171) ->
+ 61;
+yeccgoto(letrec_expr, 203) ->
+ 61;
+yeccgoto(letrec_expr, 211) ->
+ 61;
+yeccgoto(letrec_expr, 214) ->
+ 61;
+yeccgoto(letrec_expr, 216) ->
+ 61;
+yeccgoto(letrec_expr, 218) ->
+ 61;
+yeccgoto(letrec_expr, 226) ->
+ 61;
+yeccgoto(letrec_expr, 232) ->
+ 61;
+yeccgoto(letrec_expr, 235) ->
+ 61;
+yeccgoto(letrec_expr, 257) ->
+ 61;
+yeccgoto(letrec_expr, 260) ->
+ 61;
+yeccgoto(letrec_expr, 265) ->
+ 61;
+yeccgoto(literal, 284) ->
+ 288;
+yeccgoto(literal, 285) ->
+ 297;
+yeccgoto(literal, 290) ->
+ 291;
+yeccgoto(literal, 295) ->
+ 291;
+yeccgoto(literal, 298) ->
+ 304;
+yeccgoto(literal, 301) ->
+ 302;
+yeccgoto(literals, 290) ->
+ 292;
+yeccgoto(literals, 295) ->
+ 296;
+yeccgoto(module_attribute, 6) ->
+ 8;
+yeccgoto(module_attribute, 315) ->
+ 316;
+yeccgoto(module_definition, 0) ->
+ 3;
+yeccgoto(module_defs, 8) ->
+ 15;
+yeccgoto(module_defs, 316) ->
+ 317;
+yeccgoto(module_export, 4) ->
+ 6;
+yeccgoto(module_export, 314) ->
+ 315;
+yeccgoto(nil, 33) ->
+ 62;
+yeccgoto(nil, 35) ->
+ 62;
+yeccgoto(nil, 36) ->
+ 62;
+yeccgoto(nil, 37) ->
+ 62;
+yeccgoto(nil, 40) ->
+ 62;
+yeccgoto(nil, 44) ->
+ 62;
+yeccgoto(nil, 46) ->
+ 62;
+yeccgoto(nil, 48) ->
+ 62;
+yeccgoto(nil, 52) ->
+ 62;
+yeccgoto(nil, 65) ->
+ 62;
+yeccgoto(nil, 70) ->
+ 62;
+yeccgoto(nil, 74) ->
+ 62;
+yeccgoto(nil, 79) ->
+ 62;
+yeccgoto(nil, 86) ->
+ 62;
+yeccgoto(nil, 90) ->
+ 62;
+yeccgoto(nil, 96) ->
+ 62;
+yeccgoto(nil, 97) ->
+ 62;
+yeccgoto(nil, 98) ->
+ 62;
+yeccgoto(nil, 99) ->
+ 62;
+yeccgoto(nil, 100) ->
+ 62;
+yeccgoto(nil, 114) ->
+ 62;
+yeccgoto(nil, 115) ->
+ 62;
+yeccgoto(nil, 120) ->
+ 62;
+yeccgoto(nil, 126) ->
+ 139;
+yeccgoto(nil, 129) ->
+ 139;
+yeccgoto(nil, 142) ->
+ 139;
+yeccgoto(nil, 147) ->
+ 139;
+yeccgoto(nil, 151) ->
+ 139;
+yeccgoto(nil, 154) ->
+ 139;
+yeccgoto(nil, 162) ->
+ 62;
+yeccgoto(nil, 164) ->
+ 62;
+yeccgoto(nil, 166) ->
+ 62;
+yeccgoto(nil, 171) ->
+ 62;
+yeccgoto(nil, 174) ->
+ 62;
+yeccgoto(nil, 177) ->
+ 62;
+yeccgoto(nil, 200) ->
+ 62;
+yeccgoto(nil, 203) ->
+ 62;
+yeccgoto(nil, 211) ->
+ 62;
+yeccgoto(nil, 214) ->
+ 62;
+yeccgoto(nil, 216) ->
+ 62;
+yeccgoto(nil, 218) ->
+ 62;
+yeccgoto(nil, 222) ->
+ 62;
+yeccgoto(nil, 226) ->
+ 62;
+yeccgoto(nil, 232) ->
+ 62;
+yeccgoto(nil, 235) ->
+ 62;
+yeccgoto(nil, 257) ->
+ 62;
+yeccgoto(nil, 260) ->
+ 62;
+yeccgoto(nil, 265) ->
+ 62;
+yeccgoto(nil, 284) ->
+ 62;
+yeccgoto(nil, 285) ->
+ 62;
+yeccgoto(nil, 290) ->
+ 62;
+yeccgoto(nil, 295) ->
+ 62;
+yeccgoto(nil, 298) ->
+ 62;
+yeccgoto(nil, 301) ->
+ 62;
+yeccgoto(other_pattern, 65) ->
+ 111;
+yeccgoto(other_pattern, 96) ->
+ 186;
+yeccgoto(other_pattern, 97) ->
+ 111;
+yeccgoto(other_pattern, 98) ->
+ 111;
+yeccgoto(other_pattern, 100) ->
+ 111;
+yeccgoto(other_pattern, 114) ->
+ 111;
+yeccgoto(other_pattern, 115) ->
+ 123;
+yeccgoto(other_pattern, 120) ->
+ 111;
+yeccgoto(other_pattern, 162) ->
+ 111;
+yeccgoto(other_pattern, 174) ->
+ 111;
+yeccgoto(other_pattern, 177) ->
+ 111;
+yeccgoto(other_pattern, 200) ->
+ 111;
+yeccgoto(other_pattern, 222) ->
+ 111;
+yeccgoto(primop_expr, 33) ->
+ 64;
+yeccgoto(primop_expr, 35) ->
+ 64;
+yeccgoto(primop_expr, 36) ->
+ 64;
+yeccgoto(primop_expr, 37) ->
+ 64;
+yeccgoto(primop_expr, 40) ->
+ 64;
+yeccgoto(primop_expr, 44) ->
+ 64;
+yeccgoto(primop_expr, 46) ->
+ 64;
+yeccgoto(primop_expr, 48) ->
+ 64;
+yeccgoto(primop_expr, 52) ->
+ 64;
+yeccgoto(primop_expr, 70) ->
+ 64;
+yeccgoto(primop_expr, 74) ->
+ 64;
+yeccgoto(primop_expr, 79) ->
+ 64;
+yeccgoto(primop_expr, 86) ->
+ 64;
+yeccgoto(primop_expr, 90) ->
+ 64;
+yeccgoto(primop_expr, 99) ->
+ 64;
+yeccgoto(primop_expr, 164) ->
+ 64;
+yeccgoto(primop_expr, 166) ->
+ 64;
+yeccgoto(primop_expr, 171) ->
+ 64;
+yeccgoto(primop_expr, 203) ->
+ 64;
+yeccgoto(primop_expr, 211) ->
+ 64;
+yeccgoto(primop_expr, 214) ->
+ 64;
+yeccgoto(primop_expr, 216) ->
+ 64;
+yeccgoto(primop_expr, 218) ->
+ 64;
+yeccgoto(primop_expr, 226) ->
+ 64;
+yeccgoto(primop_expr, 232) ->
+ 64;
+yeccgoto(primop_expr, 235) ->
+ 64;
+yeccgoto(primop_expr, 257) ->
+ 64;
+yeccgoto(primop_expr, 260) ->
+ 64;
+yeccgoto(primop_expr, 265) ->
+ 64;
+yeccgoto(receive_expr, 33) ->
+ 66;
+yeccgoto(receive_expr, 35) ->
+ 66;
+yeccgoto(receive_expr, 36) ->
+ 66;
+yeccgoto(receive_expr, 37) ->
+ 66;
+yeccgoto(receive_expr, 40) ->
+ 66;
+yeccgoto(receive_expr, 44) ->
+ 66;
+yeccgoto(receive_expr, 46) ->
+ 66;
+yeccgoto(receive_expr, 48) ->
+ 66;
+yeccgoto(receive_expr, 52) ->
+ 66;
+yeccgoto(receive_expr, 70) ->
+ 66;
+yeccgoto(receive_expr, 74) ->
+ 66;
+yeccgoto(receive_expr, 79) ->
+ 66;
+yeccgoto(receive_expr, 86) ->
+ 66;
+yeccgoto(receive_expr, 90) ->
+ 66;
+yeccgoto(receive_expr, 99) ->
+ 66;
+yeccgoto(receive_expr, 164) ->
+ 66;
+yeccgoto(receive_expr, 166) ->
+ 66;
+yeccgoto(receive_expr, 171) ->
+ 66;
+yeccgoto(receive_expr, 203) ->
+ 66;
+yeccgoto(receive_expr, 211) ->
+ 66;
+yeccgoto(receive_expr, 214) ->
+ 66;
+yeccgoto(receive_expr, 216) ->
+ 66;
+yeccgoto(receive_expr, 218) ->
+ 66;
+yeccgoto(receive_expr, 226) ->
+ 66;
+yeccgoto(receive_expr, 232) ->
+ 66;
+yeccgoto(receive_expr, 235) ->
+ 66;
+yeccgoto(receive_expr, 257) ->
+ 66;
+yeccgoto(receive_expr, 260) ->
+ 66;
+yeccgoto(receive_expr, 265) ->
+ 66;
+yeccgoto(segment, 247) ->
+ 249;
+yeccgoto(segment, 255) ->
+ 249;
+yeccgoto(segment_pattern, 190) ->
+ 192;
+yeccgoto(segment_pattern, 198) ->
+ 192;
+yeccgoto(segment_patterns, 190) ->
+ 193;
+yeccgoto(segment_patterns, 198) ->
+ 199;
+yeccgoto(segments, 247) ->
+ 250;
+yeccgoto(segments, 255) ->
+ 256;
+yeccgoto(sequence, 33) ->
+ 67;
+yeccgoto(sequence, 35) ->
+ 67;
+yeccgoto(sequence, 36) ->
+ 67;
+yeccgoto(sequence, 37) ->
+ 67;
+yeccgoto(sequence, 40) ->
+ 67;
+yeccgoto(sequence, 44) ->
+ 67;
+yeccgoto(sequence, 46) ->
+ 67;
+yeccgoto(sequence, 48) ->
+ 67;
+yeccgoto(sequence, 52) ->
+ 67;
+yeccgoto(sequence, 70) ->
+ 67;
+yeccgoto(sequence, 74) ->
+ 67;
+yeccgoto(sequence, 79) ->
+ 67;
+yeccgoto(sequence, 86) ->
+ 67;
+yeccgoto(sequence, 90) ->
+ 67;
+yeccgoto(sequence, 99) ->
+ 67;
+yeccgoto(sequence, 164) ->
+ 67;
+yeccgoto(sequence, 166) ->
+ 67;
+yeccgoto(sequence, 171) ->
+ 67;
+yeccgoto(sequence, 203) ->
+ 67;
+yeccgoto(sequence, 211) ->
+ 67;
+yeccgoto(sequence, 214) ->
+ 67;
+yeccgoto(sequence, 216) ->
+ 67;
+yeccgoto(sequence, 218) ->
+ 67;
+yeccgoto(sequence, 226) ->
+ 67;
+yeccgoto(sequence, 232) ->
+ 67;
+yeccgoto(sequence, 235) ->
+ 67;
+yeccgoto(sequence, 257) ->
+ 67;
+yeccgoto(sequence, 260) ->
+ 67;
+yeccgoto(sequence, 265) ->
+ 67;
+yeccgoto(single_expression, 33) ->
+ 68;
+yeccgoto(single_expression, 35) ->
+ 68;
+yeccgoto(single_expression, 36) ->
+ 68;
+yeccgoto(single_expression, 37) ->
+ 68;
+yeccgoto(single_expression, 40) ->
+ 68;
+yeccgoto(single_expression, 44) ->
+ 68;
+yeccgoto(single_expression, 46) ->
+ 68;
+yeccgoto(single_expression, 48) ->
+ 68;
+yeccgoto(single_expression, 52) ->
+ 68;
+yeccgoto(single_expression, 70) ->
+ 68;
+yeccgoto(single_expression, 74) ->
+ 68;
+yeccgoto(single_expression, 79) ->
+ 68;
+yeccgoto(single_expression, 86) ->
+ 68;
+yeccgoto(single_expression, 90) ->
+ 68;
+yeccgoto(single_expression, 99) ->
+ 68;
+yeccgoto(single_expression, 164) ->
+ 68;
+yeccgoto(single_expression, 166) ->
+ 68;
+yeccgoto(single_expression, 171) ->
+ 68;
+yeccgoto(single_expression, 203) ->
+ 68;
+yeccgoto(single_expression, 211) ->
+ 68;
+yeccgoto(single_expression, 214) ->
+ 68;
+yeccgoto(single_expression, 216) ->
+ 68;
+yeccgoto(single_expression, 218) ->
+ 68;
+yeccgoto(single_expression, 226) ->
+ 68;
+yeccgoto(single_expression, 232) ->
+ 68;
+yeccgoto(single_expression, 235) ->
+ 68;
+yeccgoto(single_expression, 257) ->
+ 68;
+yeccgoto(single_expression, 260) ->
+ 68;
+yeccgoto(single_expression, 265) ->
+ 68;
+yeccgoto(tail, 231) ->
+ 234;
+yeccgoto(tail, 238) ->
+ 239;
+yeccgoto(tail_constant, 150) ->
+ 153;
+yeccgoto(tail_constant, 157) ->
+ 158;
+yeccgoto(tail_literal, 297) ->
+ 300;
+yeccgoto(tail_literal, 304) ->
+ 305;
+yeccgoto(tail_pattern, 173) ->
+ 176;
+yeccgoto(tail_pattern, 180) ->
+ 181;
+yeccgoto(timeout, 65) ->
+ 112;
+yeccgoto(timeout, 101) ->
+ 168;
+yeccgoto(try_expr, 33) ->
+ 71;
+yeccgoto(try_expr, 35) ->
+ 71;
+yeccgoto(try_expr, 36) ->
+ 71;
+yeccgoto(try_expr, 37) ->
+ 71;
+yeccgoto(try_expr, 40) ->
+ 71;
+yeccgoto(try_expr, 44) ->
+ 71;
+yeccgoto(try_expr, 46) ->
+ 71;
+yeccgoto(try_expr, 48) ->
+ 71;
+yeccgoto(try_expr, 52) ->
+ 71;
+yeccgoto(try_expr, 70) ->
+ 71;
+yeccgoto(try_expr, 74) ->
+ 71;
+yeccgoto(try_expr, 79) ->
+ 71;
+yeccgoto(try_expr, 86) ->
+ 71;
+yeccgoto(try_expr, 90) ->
+ 71;
+yeccgoto(try_expr, 99) ->
+ 71;
+yeccgoto(try_expr, 164) ->
+ 71;
+yeccgoto(try_expr, 166) ->
+ 71;
+yeccgoto(try_expr, 171) ->
+ 71;
+yeccgoto(try_expr, 203) ->
+ 71;
+yeccgoto(try_expr, 211) ->
+ 71;
+yeccgoto(try_expr, 214) ->
+ 71;
+yeccgoto(try_expr, 216) ->
+ 71;
+yeccgoto(try_expr, 218) ->
+ 71;
+yeccgoto(try_expr, 226) ->
+ 71;
+yeccgoto(try_expr, 232) ->
+ 71;
+yeccgoto(try_expr, 235) ->
+ 71;
+yeccgoto(try_expr, 257) ->
+ 71;
+yeccgoto(try_expr, 260) ->
+ 71;
+yeccgoto(try_expr, 265) ->
+ 71;
+yeccgoto(tuple, 33) ->
+ 72;
+yeccgoto(tuple, 35) ->
+ 72;
+yeccgoto(tuple, 36) ->
+ 72;
+yeccgoto(tuple, 37) ->
+ 72;
+yeccgoto(tuple, 40) ->
+ 72;
+yeccgoto(tuple, 44) ->
+ 72;
+yeccgoto(tuple, 46) ->
+ 72;
+yeccgoto(tuple, 48) ->
+ 72;
+yeccgoto(tuple, 52) ->
+ 72;
+yeccgoto(tuple, 70) ->
+ 72;
+yeccgoto(tuple, 74) ->
+ 72;
+yeccgoto(tuple, 79) ->
+ 72;
+yeccgoto(tuple, 86) ->
+ 72;
+yeccgoto(tuple, 90) ->
+ 72;
+yeccgoto(tuple, 99) ->
+ 72;
+yeccgoto(tuple, 164) ->
+ 72;
+yeccgoto(tuple, 166) ->
+ 72;
+yeccgoto(tuple, 171) ->
+ 72;
+yeccgoto(tuple, 203) ->
+ 72;
+yeccgoto(tuple, 211) ->
+ 72;
+yeccgoto(tuple, 214) ->
+ 72;
+yeccgoto(tuple, 216) ->
+ 72;
+yeccgoto(tuple, 218) ->
+ 72;
+yeccgoto(tuple, 226) ->
+ 72;
+yeccgoto(tuple, 232) ->
+ 72;
+yeccgoto(tuple, 235) ->
+ 72;
+yeccgoto(tuple, 257) ->
+ 72;
+yeccgoto(tuple, 260) ->
+ 72;
+yeccgoto(tuple, 265) ->
+ 72;
+yeccgoto(tuple_constant, 126) ->
+ 141;
+yeccgoto(tuple_constant, 129) ->
+ 141;
+yeccgoto(tuple_constant, 142) ->
+ 141;
+yeccgoto(tuple_constant, 147) ->
+ 141;
+yeccgoto(tuple_constant, 151) ->
+ 141;
+yeccgoto(tuple_constant, 154) ->
+ 141;
+yeccgoto(tuple_literal, 284) ->
+ 289;
+yeccgoto(tuple_literal, 285) ->
+ 289;
+yeccgoto(tuple_literal, 290) ->
+ 289;
+yeccgoto(tuple_literal, 295) ->
+ 289;
+yeccgoto(tuple_literal, 298) ->
+ 289;
+yeccgoto(tuple_literal, 301) ->
+ 289;
+yeccgoto(tuple_pattern, 65) ->
+ 113;
+yeccgoto(tuple_pattern, 96) ->
+ 113;
+yeccgoto(tuple_pattern, 97) ->
+ 113;
+yeccgoto(tuple_pattern, 98) ->
+ 113;
+yeccgoto(tuple_pattern, 100) ->
+ 113;
+yeccgoto(tuple_pattern, 114) ->
+ 113;
+yeccgoto(tuple_pattern, 115) ->
+ 113;
+yeccgoto(tuple_pattern, 120) ->
+ 113;
+yeccgoto(tuple_pattern, 162) ->
+ 113;
+yeccgoto(tuple_pattern, 174) ->
+ 113;
+yeccgoto(tuple_pattern, 177) ->
+ 113;
+yeccgoto(tuple_pattern, 200) ->
+ 113;
+yeccgoto(tuple_pattern, 222) ->
+ 113;
+yeccgoto(variable, 25) ->
+ 31;
+yeccgoto(variable, 26) ->
+ 267;
+yeccgoto(variable, 33) ->
+ 73;
+yeccgoto(variable, 35) ->
+ 73;
+yeccgoto(variable, 36) ->
+ 73;
+yeccgoto(variable, 37) ->
+ 73;
+yeccgoto(variable, 40) ->
+ 73;
+yeccgoto(variable, 44) ->
+ 73;
+yeccgoto(variable, 46) ->
+ 73;
+yeccgoto(variable, 48) ->
+ 73;
+yeccgoto(variable, 52) ->
+ 73;
+yeccgoto(variable, 58) ->
+ 31;
+yeccgoto(variable, 65) ->
+ 31;
+yeccgoto(variable, 70) ->
+ 73;
+yeccgoto(variable, 74) ->
+ 73;
+yeccgoto(variable, 79) ->
+ 73;
+yeccgoto(variable, 82) ->
+ 31;
+yeccgoto(variable, 83) ->
+ 31;
+yeccgoto(variable, 86) ->
+ 73;
+yeccgoto(variable, 88) ->
+ 31;
+yeccgoto(variable, 90) ->
+ 73;
+yeccgoto(variable, 96) ->
+ 124;
+yeccgoto(variable, 97) ->
+ 31;
+yeccgoto(variable, 98) ->
+ 31;
+yeccgoto(variable, 99) ->
+ 73;
+yeccgoto(variable, 100) ->
+ 31;
+yeccgoto(variable, 114) ->
+ 31;
+yeccgoto(variable, 115) ->
+ 124;
+yeccgoto(variable, 120) ->
+ 31;
+yeccgoto(variable, 162) ->
+ 31;
+yeccgoto(variable, 164) ->
+ 73;
+yeccgoto(variable, 166) ->
+ 73;
+yeccgoto(variable, 171) ->
+ 73;
+yeccgoto(variable, 174) ->
+ 31;
+yeccgoto(variable, 177) ->
+ 31;
+yeccgoto(variable, 200) ->
+ 31;
+yeccgoto(variable, 203) ->
+ 73;
+yeccgoto(variable, 211) ->
+ 73;
+yeccgoto(variable, 214) ->
+ 73;
+yeccgoto(variable, 216) ->
+ 73;
+yeccgoto(variable, 218) ->
+ 73;
+yeccgoto(variable, 222) ->
+ 31;
+yeccgoto(variable, 226) ->
+ 73;
+yeccgoto(variable, 232) ->
+ 73;
+yeccgoto(variable, 235) ->
+ 73;
+yeccgoto(variable, 257) ->
+ 73;
+yeccgoto(variable, 260) ->
+ 73;
+yeccgoto(variable, 263) ->
+ 31;
+yeccgoto(variable, 265) ->
+ 73;
+yeccgoto(__Symbol, __State) ->
+ exit({__Symbol, __State, missing_in_goto_table}).
+
+
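The yeccpars2/7 clauses and the yeccgoto/2 table above are the machine-generated LALR action and goto tables that yecc emits for the Core Erlang grammar; they are carried verbatim as test data for the options1 suite. Purely for orientation, a driver for such a generated parser could look like the sketch below. This assumes the generated module is core_parse with the usual yecc entry point parse/1, and that a companion tokenizer core_scan:string/1 is available as in the compiler application; parse_core_file/1 is a hypothetical helper, not part of this patch.

parse_core_file(FileName) ->
    {ok, Bin} = file:read_file(FileName),
    case core_scan:string(binary_to_list(Bin)) of
        {ok, Tokens, _EndLine} ->
            %% parse/1 drives the yeccpars2 action clauses and the
            %% yeccgoto table until module_definition is reduced,
            %% returning {ok, #c_module{}} or {error, ErrorInfo}.
            core_parse:parse(Tokens);
        {error, ErrorInfo, _EndLine} ->
            {error, ErrorInfo}
    end.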
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.hrl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.hrl
new file mode 100644
index 0000000000..aaf913a15a
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_parse.hrl
@@ -0,0 +1,111 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: core_parse.hrl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Core Erlang syntax trees as records.
+
+%% It would be nice to incorporate some generic functions as well but
+%% this could make including this file difficult.
+
+%% Note: the annotation list is *always* the first record field.
+%% Thus it is possible to define the macros:
+%% -define(get_ann(X), element(2, X)).
+%% -define(set_ann(X, Y), setelement(2, X, Y)).
+
+-record(c_int, {anno=[], val}). % val :: integer()
+
+-record(c_float, {anno=[], val}). % val :: float()
+
+-record(c_atom, {anno=[], val}). % val :: atom()
+
+-record(c_char, {anno=[], val}). % val :: char()
+
+-record(c_string, {anno=[], val}). % val :: string()
+
+-record(c_nil, {anno=[]}).
+
+-record(c_binary, {anno=[], segments}). % segments :: [#c_bitstr{}]
+
+-record(c_bitstr, {anno=[],val, % val :: Tree,
+ size, % size :: Tree,
+ unit, % unit :: integer(),
+ type, % type :: atom(),
+ flags}). % flags :: [atom()],
+
+-record(c_cons, {anno=[], hd, % hd :: Tree,
+ tl}). % tl :: Tree
+
+-record(c_tuple, {anno=[], es}). % es :: [Tree]
+
+-record(c_var, {anno=[], name}). % name :: integer() | atom()
+
+-record(c_fname, {anno=[], id, % id :: atom(),
+ arity}). % arity :: integer()
+
+-record(c_values, {anno=[], es}). % es :: [Tree]
+
+-record(c_fun, {anno=[], vars, % vars :: [Tree],
+ body}). % body :: Tree
+
+-record(c_seq, {anno=[], arg, % arg :: Tree,
+ body}). % body :: Tree
+
+-record(c_let, {anno=[], vars, % vars :: [Tree],
+ arg, % arg :: Tree,
+ body}). % body :: Tree
+
+-record(c_letrec, {anno=[], defs, % defs :: [#c_def{}],
+ body}). % body :: Tree
+
+-record(c_def, {anno=[], name, % name :: Tree,
+ val}). % val :: Tree,
+
+-record(c_case, {anno=[], arg, % arg :: Tree,
+ clauses}). % clauses :: [Tree]
+
+-record(c_clause, {anno=[], pats, % pats :: [Tree],
+ guard, % guard :: Tree,
+ body}). % body :: Tree
+
+-record(c_alias, {anno=[], var, % var :: Tree,
+ pat}). % pat :: Tree
+
+-record(c_receive, {anno=[], clauses, % clauses :: [Tree],
+ timeout, % timeout :: Tree,
+ action}). % action :: Tree
+
+-record(c_apply, {anno=[], op, % op :: Tree,
+ args}). % args :: [Tree]
+
+-record(c_call, {anno=[], module, % module :: Tree,
+ name, % name :: Tree,
+ args}). % args :: [Tree]
+
+-record(c_primop, {anno=[], name, % name :: Tree,
+ args}). % args :: [Tree]
+
+-record(c_try, {anno=[], arg, % arg :: Tree,
+ vars, % vars :: [Tree],
+ body, % body :: Tree
+ evars, % evars :: [Tree],
+ handler}). % handler :: Tree
+
+-record(c_catch, {anno=[], body}). % body :: Tree
+
+-record(c_module, {anno=[], name, % name :: Tree,
+ exports, % exports :: [Tree],
+ attrs, % attrs :: [#c_def{}],
+ defs}). % defs :: [#c_def{}]
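These records make up the entire abstract syntax used by the parser and prettyprinter in this test data set: every Core Erlang node is a tagged tuple whose first field is the annotation list, which is why the header comment can suggest get_ann/set_ann macros built on element/2 and setelement/3. As a rough, hedged illustration only (the function names below are invented for this example and are not part of the patch), the list [a] and a call to erlang:length/1 would be represented as:

-include("core_parse.hrl").

example_cons() ->
    %% [a] is a cons cell whose head is the atom a and whose tail is nil.
    #c_cons{hd = #c_atom{val = a}, tl = #c_nil{}}.

example_call() ->
    %% erlang:length([a]) as a remote call node.
    #c_call{module = #c_atom{val = erlang},
            name   = #c_atom{val = length},
            args   = [example_cons()]}.

%% Because the annotation list is always the first record field,
%% element/2 and setelement/3 access it uniformly for any node type,
%% exactly as the suggested ?get_ann/?set_ann macros would.
ann_of(Node)        -> element(2, Node).
with_ann(Node, Ann) -> setelement(2, Node, Ann).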
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_pp.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_pp.erl
new file mode 100644
index 0000000000..147a0dba6c
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_pp.erl
@@ -0,0 +1,430 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: core_pp.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Core Erlang (naive) prettyprinter
+
+-module(core_pp).
+
+-export([format/1]).
+
+-include("core_parse.hrl").
+
+%% ====================================================================== %%
+%% format(Node) -> Text
+%% Node = coreErlang()
+%% Text = string() | [Text]
+%%
+%% Prettyprint-formats (naively) an abstract Core Erlang syntax
+%% tree.
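+%%
+%% Illustrative use (a sketch only; assumes Core is a Core Erlang module
+%% tree, e.g. the third element of compile:file(Mod, [to_core, binary])):
+%%   io:put_chars(core_pp:format(Core))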
+
+-record(ctxt, {class = term,
+ indent = 0,
+ item_indent = 2,
+ body_indent = 4,
+ tab_width = 8,
+ line = 0}).
+
+format(Node) -> case catch format(Node, #ctxt{}) of
+ {'EXIT',_} -> io_lib:format("~p",[Node]);
+ Other -> Other
+ end.
+
+maybe_anno(Node, Fun, Ctxt) ->
+ As = core_lib:get_anno(Node),
+ case get_line(As) of
+ none ->
+ maybe_anno(Node, Fun, Ctxt, As);
+ Line ->
+ if Line > Ctxt#ctxt.line ->
+ [io_lib:format("%% Line ~w",[Line]),
+ nl_indent(Ctxt),
+ maybe_anno(Node, Fun, Ctxt#ctxt{line = Line}, As)
+ ];
+ true ->
+ maybe_anno(Node, Fun, Ctxt, As)
+ end
+ end.
+
+maybe_anno(Node, Fun, Ctxt, As) ->
+ case strip_line(As) of
+ [] ->
+ Fun(Node, Ctxt);
+ List ->
+ Ctxt1 = add_indent(Ctxt, 2),
+ Ctxt2 = add_indent(Ctxt1, 3),
+ ["( ",
+ Fun(Node, Ctxt1),
+ nl_indent(Ctxt1),
+ "-| ",format_1(core_lib:make_literal(List), Ctxt2)," )"
+ ]
+ end.
+
+strip_line([A | As]) when integer(A) ->
+ strip_line(As);
+strip_line([A | As]) ->
+ [A | strip_line(As)];
+strip_line([]) ->
+ [].
+
+get_line([L | _As]) when integer(L) ->
+ L;
+get_line([_ | As]) ->
+ get_line(As);
+get_line([]) ->
+ none.
+
+format(Node, Ctxt) ->
+ maybe_anno(Node, fun format_1/2, Ctxt).
+
+format_1(#c_char{val=C}, _) -> io_lib:write_char(C);
+format_1(#c_int{val=I}, _) -> integer_to_list(I);
+format_1(#c_float{val=F}, _) -> float_to_list(F);
+format_1(#c_atom{val=A}, _) -> core_atom(A);
+format_1(#c_nil{}, _) -> "[]";
+format_1(#c_string{val=S}, _) -> io_lib:write_string(S);
+format_1(#c_var{name=V}, _) ->
+ %% Internal variable names may be:
+ %% - atoms representing proper Erlang variable names, or
+ %% any atoms that may be printed without single-quoting
+ %% - nonnegative integers.
+ %% It is important that when printing variables, no two names
+ %% should ever map to the same string.
+ if atom(V) ->
+ S = atom_to_list(V),
+ case S of
+ [C | _] when C >= $A, C =< $Z ->
+ %% Ordinary uppercase-prefixed names are
+ %% printed just as they are.
+ S;
+ [$_ | _] ->
+ %% Already "_"-prefixed names are prefixed
+ %% with "_X", e.g. '_foo' => '_X_foo', to
+ %% avoid generating things like "____foo" upon
+ %% repeated writing and reading of code.
+ %% ("_X_X_X_foo" is better.)
+ [$_, $X | S];
+ _ ->
+ %% Plain atoms are prefixed with a single "_".
+ %% E.g. foo => "_foo".
+ [$_ | S]
+ end;
+ integer(V) ->
+ %% Integers are also simply prefixed with "_".
+ [$_ | integer_to_list(V)]
+ end;
+format_1(#c_binary{segments=Segs}, Ctxt) ->
+ ["#{",
+ format_vseq(Segs, "", ",", add_indent(Ctxt, 2),
+ fun format_bitstr/2),
+ "}#"
+ ];
+format_1(#c_tuple{es=Es}, Ctxt) ->
+ [${,
+ format_hseq(Es, ",", add_indent(Ctxt, 1), fun format/2),
+ $}
+ ];
+format_1(#c_cons{hd=H,tl=T}, Ctxt) ->
+ Txt = ["["|format(H, add_indent(Ctxt, 1))],
+ [Txt|format_list_tail(T, add_indent(Ctxt, width(Txt, Ctxt)))];
+format_1(#c_values{es=Es}, Ctxt) ->
+ format_values(Es, Ctxt);
+format_1(#c_alias{var=V,pat=P}, Ctxt) ->
+ Txt = [format(V, Ctxt)|" = "],
+ [Txt|format(P, add_indent(Ctxt, width(Txt, Ctxt)))];
+format_1(#c_let{vars=Vs,arg=A,body=B}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["let ",
+ format_values(Vs, add_indent(Ctxt, 4)),
+ " =",
+ nl_indent(Ctxt1),
+ format(A, Ctxt1),
+ nl_indent(Ctxt),
+ "in "
+ | format(B, add_indent(Ctxt, 4))
+ ];
+format_1(#c_letrec{defs=Fs,body=B}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["letrec",
+ nl_indent(Ctxt1),
+ format_funcs(Fs, Ctxt1),
+ nl_indent(Ctxt),
+ "in "
+ | format(B, add_indent(Ctxt, 4))
+ ];
+format_1(#c_seq{arg=A,body=B}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, 4),
+ ["do ",
+ format(A, Ctxt1),
+ nl_indent(Ctxt1)
+ | format(B, Ctxt1)
+ ];
+format_1(#c_case{arg=A,clauses=Cs}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.item_indent),
+ ["case ",
+ format(A, add_indent(Ctxt, 5)),
+ " of",
+ nl_indent(Ctxt1),
+ format_clauses(Cs, Ctxt1),
+ nl_indent(Ctxt)
+ | "end"
+ ];
+format_1(#c_receive{clauses=Cs,timeout=T,action=A}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.item_indent),
+ ["receive",
+ nl_indent(Ctxt1),
+ format_clauses(Cs, Ctxt1),
+ nl_indent(Ctxt),
+ "after ",
+ format(T, add_indent(Ctxt, 6)),
+ " ->",
+ nl_indent(Ctxt1),
+ format(A, Ctxt1)
+ ];
+format_1(#c_fname{id=I,arity=A}, _) ->
+ [core_atom(I),$/,integer_to_list(A)];
+format_1(#c_fun{vars=Vs,body=B}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["fun (",
+ format_hseq(Vs, ",", add_indent(Ctxt, 5), fun format/2),
+ ") ->",
+ nl_indent(Ctxt1)
+ | format(B, Ctxt1)
+ ];
+format_1(#c_apply{op=O,args=As}, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 6), %"apply "
+ Op = format(O, Ctxt1),
+ Ctxt2 = add_indent(Ctxt0, 4),
+ ["apply ",Op,
+ nl_indent(Ctxt2),
+ $(,format_hseq(As, ", ", add_indent(Ctxt2, 1), fun format/2),$)
+ ];
+format_1(#c_call{module=M,name=N,args=As}, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 5), %"call "
+ Mod = format(M, Ctxt1),
+ Ctxt2 = add_indent(Ctxt1, width(Mod, Ctxt1)+1),
+ Name = format(N, Ctxt2),
+ Ctxt3 = add_indent(Ctxt0, 4),
+ ["call ",Mod,":",Name,
+ nl_indent(Ctxt3),
+ $(,format_hseq(As, ", ", add_indent(Ctxt3, 1), fun format/2),$)
+ ];
+format_1(#c_primop{name=N,args=As}, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 7), %"primop "
+ Name = format(N, Ctxt1),
+ Ctxt2 = add_indent(Ctxt0, 4),
+ ["primop ",Name,
+ nl_indent(Ctxt2),
+ $(,format_hseq(As, ", ", add_indent(Ctxt2, 1), fun format/2),$)
+ ];
+format_1(#c_catch{body=B}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["catch",
+ nl_indent(Ctxt1),
+ format(B, Ctxt1)
+ ];
+format_1(#c_try{arg=E,vars=Vs,body=B,evars=Evs,handler=H}, Ctxt) ->
+ Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["try",
+ nl_indent(Ctxt1),
+ format(E, Ctxt1),
+ nl_indent(Ctxt),
+ "of ",
+ format_values(Vs, add_indent(Ctxt, 3)),
+ " ->",
+ nl_indent(Ctxt1),
+ format(B, Ctxt1),
+ nl_indent(Ctxt),
+ "catch ",
+ format_values(Evs, add_indent(Ctxt, 6)),
+ " ->",
+ nl_indent(Ctxt1)
+ | format(H, Ctxt1)
+ ];
+format_1(#c_def{name=N,val=V}, Ctxt) ->
+ Ctxt1 = add_indent(set_class(Ctxt, expr), Ctxt#ctxt.body_indent),
+ [format(N, Ctxt),
+ " =",
+ nl_indent(Ctxt1)
+ | format(V, Ctxt1)
+ ];
+format_1(#c_module{name=N,exports=Es,attrs=As,defs=Ds}, Ctxt) ->
+ Mod = ["module ", format(N, Ctxt)],
+ [Mod," [",
+ format_vseq(Es,
+ "", ",",
+ add_indent(set_class(Ctxt, term), width(Mod, Ctxt)+2),
+ fun format/2),
+ "]",
+ nl_indent(Ctxt),
+ " attributes [",
+ format_vseq(As,
+ "", ",",
+ add_indent(set_class(Ctxt, def), 16),
+ fun format/2),
+ "]",
+ nl_indent(Ctxt),
+ format_funcs(Ds, Ctxt),
+ nl_indent(Ctxt)
+ | "end"
+ ];
+format_1(Type, _) ->
+ ["** Unsupported type: ",
+ io_lib:write(Type)
+ | " **"
+ ].
+
+format_funcs(Fs, Ctxt) ->
+ format_vseq(Fs,
+ "", "",
+ set_class(Ctxt, def),
+ fun format/2).
+
+format_values(Vs, Ctxt) ->
+ [$<,
+ format_hseq(Vs, ",", add_indent(Ctxt, 1), fun format/2),
+ $>].
+
+format_bitstr(#c_bitstr{val=V,size=S,unit=U,type=T,flags=Fs}, Ctxt0) ->
+ Vs = [S, U, T, Fs],
+ Ctxt1 = add_indent(Ctxt0, 2),
+ Val = format(V, Ctxt1),
+ Ctxt2 = add_indent(Ctxt1, width(Val, Ctxt1) + 2),
+ ["#<", Val, ">(", format_hseq(Vs,",", Ctxt2, fun format/2), $)].
+
+format_clauses(Cs, Ctxt) ->
+ format_vseq(Cs, "", "", set_class(Ctxt, clause),
+ fun format_clause/2).
+
+format_clause(Node, Ctxt) ->
+ maybe_anno(Node, fun format_clause_1/2, Ctxt).
+
+format_clause_1(#c_clause{pats=Ps,guard=G,body=B}, Ctxt) ->
+ Ptxt = format_values(Ps, Ctxt),
+ Ctxt2 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
+ [Ptxt,
+ " when ",
+ format_guard(G, add_indent(set_class(Ctxt, expr),
+ width(Ptxt, Ctxt) + 6)),
+ " ->",
+ nl_indent(Ctxt2)
+ | format(B, set_class(Ctxt2, expr))
+ ].
+
+format_guard(Node, Ctxt) ->
+ maybe_anno(Node, fun format_guard_1/2, Ctxt).
+
+format_guard_1(#c_call{module=M,name=N,args=As}, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 5), %"call "
+ Mod = format(M, Ctxt1),
+ Ctxt2 = add_indent(Ctxt1, width(Mod, Ctxt1)+1),
+ Name = format(N, Ctxt2),
+ Ctxt3 = add_indent(Ctxt0, 4),
+ ["call ",Mod,":",Name,
+ nl_indent(Ctxt3),
+ $(,format_vseq(As, "",",", add_indent(Ctxt3, 1), fun format_guard/2),$)
+ ];
+format_guard_1(E, Ctxt) -> format_1(E, Ctxt). %Anno already done
+
+%% format_hseq([Thing], Separator, Context, Fun) -> Txt.
+%% Format a sequence horizontally on the same line with Separator between.
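+%% For example (a sketch), format_hseq([A,B,C], ",", Ctxt, fun format/2)
+%% yields the elements on one line as "A,B,C", widening the indentation
+%% recorded in Ctxt as each element is emitted.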
+
+format_hseq([H], _, Ctxt, Fun) ->
+ Fun(H, Ctxt);
+format_hseq([H|T], Sep, Ctxt, Fun) ->
+ Txt = [Fun(H, Ctxt)|Sep],
+ Ctxt1 = add_indent(Ctxt, width(Txt, Ctxt)),
+ [Txt|format_hseq(T, Sep, Ctxt1, Fun)];
+format_hseq([], _, _, _) -> "".
+
+%% format_vseq([Thing], LinePrefix, LineSuffix, Context, Fun) -> Txt.
+%% Format a sequence vertically in indented lines adding LinePrefix
+%% to the beginning of each line and LineSuffix to the end of each
+%% line. No prefix on the first line or suffix on the last line.
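+%% For example (a sketch), format_vseq([D1,D2], "", ",", Ctxt, Fun) yields
+%% D1 followed by ",", a newline at the current indentation, and then D2.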
+
+format_vseq([H], _Pre, _Suf, Ctxt, Fun) ->
+ Fun(H, Ctxt);
+format_vseq([H|T], Pre, Suf, Ctxt, Fun) ->
+ [Fun(H, Ctxt),Suf,nl_indent(Ctxt),Pre|
+ format_vseq(T, Pre, Suf, Ctxt, Fun)];
+format_vseq([], _, _, _, _) -> "".
+
+format_list_tail(#c_nil{anno=[]}, _) -> "]";
+format_list_tail(#c_cons{anno=[],hd=H,tl=T}, Ctxt) ->
+ Txt = [$,|format(H, Ctxt)],
+ Ctxt1 = add_indent(Ctxt, width(Txt, Ctxt)),
+ [Txt|format_list_tail(T, Ctxt1)];
+format_list_tail(Tail, Ctxt) ->
+ ["|",format(Tail, add_indent(Ctxt, 1)),"]"].
+
+indent(Ctxt) -> indent(Ctxt#ctxt.indent, Ctxt).
+
+indent(N, _) when N =< 0 -> "";
+indent(N, Ctxt) ->
+ T = Ctxt#ctxt.tab_width,
+ string:chars($\t, N div T, string:chars($\s, N rem T)).
+
+nl_indent(Ctxt) -> [$\n|indent(Ctxt)].
+
+
+unindent(T, Ctxt) ->
+ unindent(T, Ctxt#ctxt.indent, Ctxt, []).
+
+unindent(T, N, _, C) when N =< 0 ->
+ [T|C];
+unindent([$\s|T], N, Ctxt, C) ->
+ unindent(T, N - 1, Ctxt, C);
+unindent([$\t|T], N, Ctxt, C) ->
+ Tab = Ctxt#ctxt.tab_width,
+ if N >= Tab ->
+ unindent(T, N - Tab, Ctxt, C);
+ true ->
+ unindent([string:chars($\s, Tab - N)|T], 0, Ctxt, C)
+ end;
+unindent([L|T], N, Ctxt, C) when list(L) ->
+ unindent(L, N, Ctxt, [T|C]);
+unindent([H|T], _, _, C) ->
+ [H|[T|C]];
+unindent([], N, Ctxt, [H|T]) ->
+ unindent(H, N, Ctxt, T);
+unindent([], _, _, []) -> [].
+
+
+width(Txt, Ctxt) ->
+ case catch width(Txt, 0, Ctxt, []) of
+ {'EXIT',_} -> exit({bad_text,Txt});
+ Other -> Other
+ end.
+
+width([$\t|T], A, Ctxt, C) ->
+ width(T, A + Ctxt#ctxt.tab_width, Ctxt, C);
+width([$\n|T], _, Ctxt, C) ->
+ width(unindent([T|C], Ctxt), Ctxt);
+width([H|T], A, Ctxt, C) when list(H) ->
+ width(H, A, Ctxt, [T|C]);
+width([_|T], A, Ctxt, C) ->
+ width(T, A + 1, Ctxt, C);
+width([], A, Ctxt, [H|T]) ->
+ width(H, A, Ctxt, T);
+width([], A, _, []) -> A.
+
+add_indent(Ctxt, Dx) ->
+ Ctxt#ctxt{indent = Ctxt#ctxt.indent + Dx}.
+
+set_class(Ctxt, Class) ->
+ Ctxt#ctxt{class = Class}.
+
+core_atom(A) -> io_lib:write_string(atom_to_list(A), $').
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_scan.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_scan.erl
new file mode 100644
index 0000000000..f53c3c1631
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/core_scan.erl
@@ -0,0 +1,495 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: core_scan.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose: Scanner for Core Erlang.
+
+%% For handling ISO 8859-1 (Latin-1) we use the following type
+%% information:
+%%
+%% 000 - 037 NUL - US control
+%% 040 - 057 SPC - / punctuation
+%% 060 - 071 0 - 9 digit
+%% 072 - 100 : - @ punctuation
+%% 101 - 132 A - Z uppercase
+%% 133 - 140 [ - ` punctuation
+%% 141 - 172 a - z lowercase
+%% 173 - 176 { - ~ punctuation
+%% 177 DEL control
+%% 200 - 237 control
+%% 240 - 277 NBSP - ¿ punctuation
+%% 300 - 326 À - Ö uppercase
+%% 327 × punctuation
+%% 330 - 336 Ø - Þ uppercase
+%% 337 - 366 ß - ö lowercase
+%% 367 ÷ punctuation
+%% 370 - 377 ø - ÿ lowercase
+%%
+%% Many punctuation characters in this region have special meaning. Must
+%% watch when using × \327, which is very close to x \170.
+
+-module(core_scan).
+
+-export([string/1,string/2,tokens/3,format_error/1]).
+
+-import(lists, [reverse/1]).
+
+%% tokens(Continuation, CharList, StartPos) ->
+%% {done, {ok, [Tok], EndPos}, Rest} |
+%% {done, {error,{ErrorPos,core_scan,What}, EndPos}, Rest} |
+%% {more, Continuation'}
+%% This is the main function into the re-entrant scanner. It calls the
+%% re-entrant pre-scanner until this says done, then calls scan/1 on
+%% the result.
+%%
+%% The continuation has the form:
+%% {RestChars,CharsSoFar,CurrentPos,StartPos}
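+%%
+%% Illustrative use (a sketch; the exact token list is elided):
+%%   {more, Cont} = core_scan:tokens([], "module 'm'", 1),
+%%   {done, {ok, _Toks, _EndPos}, _Rest} = core_scan:tokens(Cont, eof, 1).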
+
+tokens([], Chars, Pos) -> %First call
+ tokens({[],[],Pos,Pos}, Chars, Pos);
+tokens({Chars,SoFar0,Cp,Sp}, MoreChars, _) ->
+ In = Chars ++ MoreChars,
+ case pre_scan(In, SoFar0, Cp) of
+ {done,_,[],Ep} -> %Found nothing
+ {done,{eof,Ep},[]};
+ {done,_,SoFar1,Ep} -> %Got complete tokens
+ Res = case scan(reverse(SoFar1), Sp) of
+ {ok,Toks} -> {ok,Toks,Ep};
+ {error,E} -> {error,E,Ep}
+ end,
+ {done,Res,[]};
+ {more,Rest,SoFar1,Cp1} -> %Missing end token
+ {more,{Rest,SoFar1,Cp1,Sp}};
+ Other -> %An error has occurred
+ {done,Other,[]}
+ end.
+
+%% string([Char]) ->
+%% string([Char], StartPos) ->
+%% {ok, [Tok], EndPos} |
+%% {error,{Pos,core_scan,What}, EndPos}
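+%%
+%% For example (a sketch; token details elided):
+%%   {ok, _Toks, _EndPos} = core_scan:string("'foo'/1"),
+%%   {error, {_Pos, core_scan, _What}, _EndPos1} = core_scan:string("\"unterminated").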
+
+string(Cs) -> string(Cs, 1).
+
+string(Cs, Sp) ->
+ %% Add an 'eof' to always get correct handling.
+ case string_pre_scan(Cs, [], Sp) of
+ {done,_,SoFar,Ep} -> %Got tokens
+ case scan(reverse(SoFar), Sp) of
+ {ok,Toks} -> {ok,Toks,Ep};
+ {error,E} -> {error,E,Ep}
+ end;
+ Other -> Other %An error has occurred
+ end.
+
+%% string_pre_scan(Cs, SoFar0, StartPos) ->
+%% {done,Rest,SoFar,EndPos} | {error,E,EndPos}.
+
+string_pre_scan(Cs, SoFar0, Sp) ->
+ case pre_scan(Cs, SoFar0, Sp) of
+ {done,Rest,SoFar1,Ep} -> %Got complete tokens
+ {done,Rest,SoFar1,Ep};
+ {more,Rest,SoFar1,Ep} -> %Missing end token
+ string_pre_scan(Rest ++ eof, SoFar1, Ep);
+ Other -> Other %An error has occurred
+ end.
+
+%% format_error(Error)
+%% Return a string describing the error.
+
+format_error({string,Quote,Head}) ->
+ ["unterminated " ++ string_thing(Quote) ++
+ " starting with " ++ io_lib:write_string(Head,Quote)];
+format_error({illegal,Type}) -> io_lib:fwrite("illegal ~w", [Type]);
+format_error(char) -> "unterminated character";
+format_error(scan) -> "premature end";
+format_error({base,Base}) -> io_lib:fwrite("illegal base '~w'", [Base]);
+format_error(float) -> "bad float";
+format_error(Other) -> io_lib:write(Other).
+
+string_thing($') -> "atom";
+string_thing($") -> "string".
+
+%% Re-entrant pre-scanner.
+%%
+%% If the input list of characters is insufficient to build a term the
+%% scanner returns a request for more characters and a continuation to be
+%% used when trying to build a term with more characters. To indicate
+%% end-of-file, the input character list should be replaced with 'eof',
+%% since an empty list already has a meaning ("no more characters yet").
+%%
+%% When more characters are needed inside a comment, string or quoted
+%% atom, which can become rather long, instead of pushing the
+%% characters read so far back onto RestChars to be reread, a special
+%% reentry token is returned indicating the middle of a construct.
+%% The token is the start character as an atom, '%', '"' and '\''.
+
+%% pre_scan([Char], SoFar, StartPos) ->
+%% {done,RestChars,ScannedChars,NewPos} |
+%% {more,RestChars,ScannedChars,NewPos} |
+%% {error,{ErrorPos,core_scan,Description},NewPos}.
+%% Main pre-scan function. It has been split into two functions for
+%% efficiency; with a good indexing compiler the split would be unnecessary.
+
+pre_scan([C|Cs], SoFar, Pos) ->
+ pre_scan(C, Cs, SoFar, Pos);
+pre_scan([], SoFar, Pos) ->
+ {more,[],SoFar,Pos};
+pre_scan(eof, SoFar, Pos) ->
+ {done,eof,SoFar,Pos}.
+
+%% pre_scan(Char, [Char], SoFar, Pos)
+
+pre_scan($$, Cs0, SoFar0, Pos) ->
+ case pre_char(Cs0, [$$|SoFar0]) of
+ {Cs,SoFar} ->
+ pre_scan(Cs, SoFar, Pos);
+ more ->
+ {more,[$$|Cs0],SoFar0, Pos};
+ error ->
+ pre_error(char, Pos, Pos)
+ end;
+pre_scan($', Cs, SoFar, Pos) ->
+ pre_string(Cs, $', '\'', Pos, [$'|SoFar], Pos);
+pre_scan({'\'',Sp}, Cs, SoFar, Pos) -> %Re-entering quoted atom
+ pre_string(Cs, $', '\'', Sp, SoFar, Pos);
+pre_scan($", Cs, SoFar, Pos) ->
+ pre_string(Cs, $", '"', Pos, [$"|SoFar], Pos);
+pre_scan({'"',Sp}, Cs, SoFar, Pos) -> %Re-entering string
+ pre_string(Cs, $", '"', Sp, SoFar, Pos);
+pre_scan($%, Cs, SoFar, Pos) ->
+ pre_comment(Cs, SoFar, Pos);
+pre_scan('%', Cs, SoFar, Pos) -> %Re-entering comment
+ pre_comment(Cs, SoFar, Pos);
+pre_scan($\n, Cs, SoFar, Pos) ->
+ pre_scan(Cs, [$\n|SoFar], Pos+1);
+pre_scan(C, Cs, SoFar, Pos) ->
+ pre_scan(Cs, [C|SoFar], Pos).
+
+%% pre_string([Char], Quote, Reent, StartPos, SoFar, Pos)
+
+pre_string([Q|Cs], Q, _, _, SoFar, Pos) ->
+ pre_scan(Cs, [Q|SoFar], Pos);
+pre_string([$\n|Cs], Q, Reent, Sp, SoFar, Pos) ->
+ pre_string(Cs, Q, Reent, Sp, [$\n|SoFar], Pos+1);
+pre_string([$\\|Cs0], Q, Reent, Sp, SoFar0, Pos) ->
+ case pre_escape(Cs0, SoFar0) of
+ {Cs,SoFar} ->
+ pre_string(Cs, Q, Reent, Sp, SoFar, Pos);
+ more ->
+ {more,[{Reent,Sp},$\\|Cs0],SoFar0,Pos};
+ error ->
+ pre_string_error(Q, Sp, SoFar0, Pos)
+ end;
+pre_string([C|Cs], Q, Reent, Sp, SoFar, Pos) ->
+ pre_string(Cs, Q, Reent, Sp, [C|SoFar], Pos);
+pre_string([], _, Reent, Sp, SoFar, Pos) ->
+ {more,[{Reent,Sp}],SoFar,Pos};
+pre_string(eof, Q, _, Sp, SoFar, Pos) ->
+ pre_string_error(Q, Sp, SoFar, Pos).
+
+pre_string_error(Q, Sp, SoFar, Pos) ->
+ S = reverse(string:substr(SoFar, 1, string:chr(SoFar, Q)-1)),
+ pre_error({string,Q,string:substr(S, 1, 16)}, Sp, Pos).
+
+pre_char([C|Cs], SoFar) -> pre_char(C, Cs, SoFar);
+pre_char([], _) -> more;
+pre_char(eof, _) -> error.
+
+pre_char($\\, Cs, SoFar) ->
+ pre_escape(Cs, SoFar);
+pre_char(C, Cs, SoFar) ->
+ {Cs,[C|SoFar]}.
+
+pre_escape([$^|Cs0], SoFar) ->
+ case Cs0 of
+ [C3|Cs] ->
+ {Cs,[C3,$^,$\\|SoFar]};
+ [] -> more;
+ eof -> error
+ end;
+pre_escape([C|Cs], SoFar) ->
+ {Cs,[C,$\\|SoFar]};
+pre_escape([], _) -> more;
+pre_escape(eof, _) -> error.
+
+%% pre_comment([Char], SoFar, Pos)
+%% Comments are replaced by one SPACE.
+
+pre_comment([$\n|Cs], SoFar, Pos) ->
+ pre_scan(Cs, [$\n,$\s|SoFar], Pos+1); %Terminate comment
+pre_comment([_|Cs], SoFar, Pos) ->
+ pre_comment(Cs, SoFar, Pos);
+pre_comment([], SoFar, Pos) ->
+ {more,['%'],SoFar,Pos};
+pre_comment(eof, Sofar, Pos) ->
+ pre_scan(eof, [$\s|Sofar], Pos).
+
+pre_error(E, Epos, Pos) ->
+ {error,{Epos,core_scan,E}, Pos}.
+
+%% scan(CharList, StartPos)
+%% This takes a list of characters and tries to tokenise them.
+%%
+%% The token list is built in reverse order (in a stack) to save appending
+%% and then reversed when all the tokens have been collected. Most tokens
+%% are built in the same way.
+%%
+%% Returns:
+%% {ok,[Tok]}
+%% {error,{ErrorPos,core_scan,What}}
+
+scan(Cs, Pos) ->
+ scan1(Cs, [], Pos).
+
+%% scan1(Characters, TokenStack, Position)
+%% Scan a list of characters into tokens.
+
+scan1([$\n|Cs], Toks, Pos) -> %Skip newline
+ scan1(Cs, Toks, Pos+1);
+scan1([C|Cs], Toks, Pos) when C >= $\000, C =< $\s -> %Skip control chars
+ scan1(Cs, Toks, Pos);
+scan1([C|Cs], Toks, Pos) when C >= $\200, C =< $\240 ->
+ scan1(Cs, Toks, Pos);
+scan1([C|Cs], Toks, Pos) when C >= $a, C =< $z -> %Keywords
+ scan_key_word(C, Cs, Toks, Pos);
+scan1([C|Cs], Toks, Pos) when C >= $ß, C =< $ÿ, C /= $÷ ->
+ scan_key_word(C, Cs, Toks, Pos);
+scan1([C|Cs], Toks, Pos) when C >= $A, C =< $Z -> %Variables
+ scan_variable(C, Cs, Toks, Pos);
+scan1([C|Cs], Toks, Pos) when C >= $À, C =< $Þ, C /= $× ->
+ scan_variable(C, Cs, Toks, Pos);
+scan1([C|Cs], Toks, Pos) when C >= $0, C =< $9 -> %Numbers
+ scan_number(C, Cs, Toks, Pos);
+scan1([$-,C|Cs], Toks, Pos) when C >= $0, C =< $9 -> %Signed numbers
+ scan_signed_number($-, C, Cs, Toks, Pos);
+scan1([$+,C|Cs], Toks, Pos) when C >= $0, C =< $9 -> %Signed numbers
+ scan_signed_number($+, C, Cs, Toks, Pos);
+scan1([$_|Cs], Toks, Pos) -> %_ variables
+ scan_variable($_, Cs, Toks, Pos);
+scan1([$$|Cs0], Toks, Pos) -> %Character constant
+ {C,Cs,Pos1} = scan_char(Cs0, Pos),
+ scan1(Cs, [{char,Pos,C}|Toks], Pos1);
+scan1([$'|Cs0], Toks, Pos) -> %Atom (always quoted)
+ {S,Cs1,Pos1} = scan_string(Cs0, $', Pos),
+ case catch list_to_atom(S) of
+ A when atom(A) ->
+ scan1(Cs1, [{atom,Pos,A}|Toks], Pos1);
+ _Error -> scan_error({illegal,atom}, Pos)
+ end;
+scan1([$"|Cs0], Toks, Pos) -> %String
+ {S,Cs1,Pos1} = scan_string(Cs0, $", Pos),
+ scan1(Cs1, [{string,Pos,S}|Toks], Pos1);
+%% Punctuation characters and operators; recognise multi-character tokens first.
+scan1("->" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{'->',Pos}|Toks], Pos);
+scan1("-|" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{'-|',Pos}|Toks], Pos);
+scan1([C|Cs], Toks, Pos) -> %Punctuation character
+ P = list_to_atom([C]),
+ scan1(Cs, [{P,Pos}|Toks], Pos);
+scan1([], Toks0, _) ->
+ Toks = reverse(Toks0),
+ {ok,Toks}.
+
+%% scan_key_word(FirstChar, CharList, Tokens, Pos)
+%% scan_variable(FirstChar, CharList, Tokens, Pos)
+
+scan_key_word(C, Cs0, Toks, Pos) ->
+ {Wcs,Cs} = scan_name(Cs0, []),
+ case catch list_to_atom([C|reverse(Wcs)]) of
+ Name when atom(Name) ->
+ scan1(Cs, [{Name,Pos}|Toks], Pos);
+ _Error -> scan_error({illegal,atom}, Pos)
+ end.
+
+scan_variable(C, Cs0, Toks, Pos) ->
+ {Wcs,Cs} = scan_name(Cs0, []),
+ case catch list_to_atom([C|reverse(Wcs)]) of
+ Name when atom(Name) ->
+ scan1(Cs, [{var,Pos,Name}|Toks], Pos);
+ _Error -> scan_error({illegal,var}, Pos)
+ end.
+
+%% scan_name(Cs) -> lists:splitwith(fun (C) -> name_char(C) end, Cs).
+
+scan_name([C|Cs], Ncs) ->
+ case name_char(C) of
+ true -> scan_name(Cs, [C|Ncs]);
+ false -> {Ncs,[C|Cs]} %Must rebuild here, sigh!
+ end;
+scan_name([], Ncs) ->
+ {Ncs,[]}.
+
+name_char(C) when C >= $a, C =< $z -> true;
+name_char(C) when C >= $ß, C =< $ÿ, C /= $÷ -> true;
+name_char(C) when C >= $A, C =< $Z -> true;
+name_char(C) when C >= $À, C =< $Þ, C /= $× -> true;
+name_char(C) when C >= $0, C =< $9 -> true;
+name_char($_) -> true;
+name_char($@) -> true;
+name_char(_) -> false.
+
+%% scan_string(CharList, QuoteChar, Pos) -> {StringChars,RestChars,NewPos}.
+
+scan_string(Cs, Q, Pos) ->
+ scan_string(Cs, [], Q, Pos).
+
+scan_string([Q|Cs], Scs, Q, Pos) ->
+ {reverse(Scs),Cs,Pos};
+scan_string([$\n|Cs], Scs, Q, Pos) ->
+ scan_string(Cs, [$\n|Scs], Q, Pos+1);
+scan_string([$\\|Cs0], Scs, Q, Pos) ->
+ {C,Cs,Pos1} = scan_escape(Cs0, Pos),
+ scan_string(Cs, [C|Scs], Q, Pos1);
+scan_string([C|Cs], Scs, Q, Pos) ->
+ scan_string(Cs, [C|Scs], Q, Pos).
+
+%% scan_char(Chars, Pos) -> {Char,RestChars,NewPos}.
+%% Read a single character from a character constant. The pre-scan
+%% phase has checked for errors here.
+
+scan_char([$\\|Cs], Pos) ->
+ scan_escape(Cs, Pos);
+scan_char([$\n|Cs], Pos) -> %Newline
+ {$\n,Cs,Pos+1};
+scan_char([C|Cs], Pos) ->
+ {C,Cs,Pos}.
+
+scan_escape([O1,O2,O3|Cs], Pos) when %\<1-3> octal digits
+ O1 >= $0, O1 =< $7, O2 >= $0, O2 =< $7, O3 >= $0, O3 =< $7 ->
+ Val = (O1*8 + O2)*8 + O3 - 73*$0,
+ {Val,Cs,Pos};
+scan_escape([O1,O2|Cs], Pos) when
+ O1 >= $0, O1 =< $7, O2 >= $0, O2 =< $7 ->
+ Val = (O1*8 + O2) - 9*$0,
+ {Val,Cs,Pos};
+scan_escape([O1|Cs], Pos) when
+ O1 >= $0, O1 =< $7 ->
+ {O1 - $0,Cs,Pos};
+scan_escape([$^,C|Cs], Pos) -> %\^X -> CTL-X
+ Val = C band 31,
+ {Val,Cs,Pos};
+%scan_escape([$\n,C1|Cs],Pos) ->
+% {C1,Cs,Pos+1};
+%scan_escape([C,C1|Cs],Pos) when C >= $\000, C =< $\s ->
+% {C1,Cs,Pos};
+scan_escape([$\n|Cs],Pos) ->
+ {$\n,Cs,Pos+1};
+scan_escape([C0|Cs],Pos) ->
+ C = escape_char(C0),
+ {C,Cs,Pos}.
+
+escape_char($n) -> $\n; %\n = LF
+escape_char($r) -> $\r; %\r = CR
+escape_char($t) -> $\t; %\t = TAB
+escape_char($v) -> $\v; %\v = VT
+escape_char($b) -> $\b; %\b = BS
+escape_char($f) -> $\f; %\f = FF
+escape_char($e) -> $\e; %\e = ESC
+escape_char($s) -> $\s; %\s = SPC
+escape_char($d) -> $\d; %\d = DEL
+escape_char(C) -> C.
+
+%% scan_number(Char, CharList, TokenStack, Pos)
+%% We can handle simple radix notation:
+%% <digit>#<digits> - the digits read in that base
+%% <digits> - the digits in base 10
+%% <digits>.<digits>
+%% <digits>.<digits>E+-<digits>
+%%
+%% Except for explicitly based integers we build a list of all the
+%% characters and then use list_to_integer/1 or list_to_float/1 to
+%% generate the value.
+
+%% SPos == Start position
+%% CPos == Current position
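+%%
+%% For example (a sketch), "10" scans to the integer 10, "2#101" to the
+%% integer 5, "16#ff" to the integer 255, and "3.14" to the float 3.14.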
+
+scan_number(C, Cs0, Toks, Pos) ->
+ {Ncs,Cs,Pos1} = scan_integer(Cs0, [C], Pos),
+ scan_after_int(Cs, Ncs, Toks, Pos, Pos1).
+
+scan_signed_number(S, C, Cs0, Toks, Pos) ->
+ {Ncs,Cs,Pos1} = scan_integer(Cs0, [C,S], Pos),
+ scan_after_int(Cs, Ncs, Toks, Pos, Pos1).
+
+scan_integer([C|Cs], Stack, Pos) when C >= $0, C =< $9 ->
+ scan_integer(Cs, [C|Stack], Pos);
+scan_integer(Cs, Stack, Pos) ->
+ {Stack,Cs,Pos}.
+
+scan_after_int([$.,C|Cs0], Ncs0, Toks, SPos, CPos) when C >= $0, C =< $9 ->
+ {Ncs,Cs,CPos1} = scan_integer(Cs0, [C,$.|Ncs0], CPos),
+ scan_after_fraction(Cs, Ncs, Toks, SPos, CPos1);
+scan_after_int([$#|Cs], Ncs, Toks, SPos, CPos) ->
+ case list_to_integer(reverse(Ncs)) of
+ Base when Base >= 2, Base =< 16 ->
+ scan_based_int(Cs, 0, Base, Toks, SPos, CPos);
+ Base ->
+ scan_error({base,Base}, CPos)
+ end;
+scan_after_int(Cs, Ncs, Toks, SPos, CPos) ->
+ N = list_to_integer(reverse(Ncs)),
+ scan1(Cs, [{integer,SPos,N}|Toks], CPos).
+
+scan_based_int([C|Cs], SoFar, Base, Toks, SPos, CPos) when
+ C >= $0, C =< $9, C < Base + $0 ->
+ Next = SoFar * Base + (C - $0),
+ scan_based_int(Cs, Next, Base, Toks, SPos, CPos);
+scan_based_int([C|Cs], SoFar, Base, Toks, SPos, CPos) when
+ C >= $a, C =< $f, C < Base + $a - 10 ->
+ Next = SoFar * Base + (C - $a + 10),
+ scan_based_int(Cs, Next, Base, Toks, SPos, CPos);
+scan_based_int([C|Cs], SoFar, Base, Toks, SPos, CPos) when
+ C >= $A, C =< $F, C < Base + $A - 10 ->
+ Next = SoFar * Base + (C - $A + 10),
+ scan_based_int(Cs, Next, Base, Toks, SPos, CPos);
+scan_based_int(Cs, SoFar, _, Toks, SPos, CPos) ->
+ scan1(Cs, [{integer,SPos,SoFar}|Toks], CPos).
+
+scan_after_fraction([$E|Cs], Ncs, Toks, SPos, CPos) ->
+ scan_exponent(Cs, [$E|Ncs], Toks, SPos, CPos);
+scan_after_fraction([$e|Cs], Ncs, Toks, SPos, CPos) ->
+ scan_exponent(Cs, [$E|Ncs], Toks, SPos, CPos);
+scan_after_fraction(Cs, Ncs, Toks, SPos, CPos) ->
+ case catch list_to_float(reverse(Ncs)) of
+ N when float(N) ->
+ scan1(Cs, [{float,SPos,N}|Toks], CPos);
+ _Error -> scan_error({illegal,float}, SPos)
+ end.
+
+%% scan_exponent(CharList, NumberCharStack, TokenStack, StartPos, CurPos)
+%% Generate an error here if E{+|-} is not followed by any digits.
+
+scan_exponent([$+|Cs], Ncs, Toks, SPos, CPos) ->
+ scan_exponent1(Cs, [$+|Ncs], Toks, SPos, CPos);
+scan_exponent([$-|Cs], Ncs, Toks, SPos, CPos) ->
+ scan_exponent1(Cs, [$-|Ncs], Toks, SPos, CPos);
+scan_exponent(Cs, Ncs, Toks, SPos, CPos) ->
+ scan_exponent1(Cs, Ncs, Toks, SPos, CPos).
+
+scan_exponent1([C|Cs0], Ncs0, Toks, SPos, CPos) when C >= $0, C =< $9 ->
+ {Ncs,Cs,CPos1} = scan_integer(Cs0, [C|Ncs0], CPos),
+ case catch list_to_float(reverse(Ncs)) of
+ N when float(N) ->
+ scan1(Cs, [{float,SPos,N}|Toks], CPos1);
+ _Error -> scan_error({illegal,float}, SPos)
+ end;
+scan_exponent1(_, _, _, _, CPos) ->
+ scan_error(float, CPos).
+
+scan_error(In, Pos) ->
+ {error,{Pos,core_scan,In}}.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/erl_bifs.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/erl_bifs.erl
new file mode 100644
index 0000000000..088f44f9fd
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/erl_bifs.erl
@@ -0,0 +1,486 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: erl_bifs.erl,v 1.2 2009/09/17 09:46:19 kostis Exp $
+%%
+%% Purpose: Information about the Erlang built-in functions.
+
+-module(erl_bifs).
+
+-export([is_bif/3, is_guard_bif/3, is_pure/3, is_safe/3]).
+
+
+%% =====================================================================
+%% is_bif(Module, Name, Arity) -> boolean()
+%%
+%% Module = Name = atom()
+%% Arity = integer()
+%%
+%% Returns `true' if the function `Module:Name/Arity' is a Built-In
+%% Function (BIF) of Erlang. BIFs "come with the implementation",
+%% and can be assumed to exist and have the same behaviour in any
+%% later versions of the same implementation of the language. Being
+%% a BIF does *not* imply that the function belongs to the module
+%% `erlang', nor that it is implemented in C or assembler (cf.
+%% `erlang:is_builtin/3'), nor that it is auto-imported by the
+%% compiler (cf. `erl_internal:bif/3').
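+%%
+%% For example, erl_bifs:is_bif(erlang, length, 1) returns `true', while
+%% erl_bifs:is_bif(lists, map, 2) returns `false': lists:map/2 is ordinary
+%% library code rather than a BIF in the above sense.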
+
+is_bif(erlang, '!', 2) -> true;
+is_bif(erlang, '*', 2) -> true;
+is_bif(erlang, '+', 1) -> true;
+is_bif(erlang, '+', 2) -> true;
+is_bif(erlang, '++', 2) -> true;
+is_bif(erlang, '-', 1) -> true;
+is_bif(erlang, '-', 2) -> true;
+is_bif(erlang, '--', 2) -> true;
+is_bif(erlang, '/', 2) -> true;
+is_bif(erlang, '/=', 2) -> true;
+is_bif(erlang, '<', 2) -> true;
+is_bif(erlang, '=/=', 2) -> true;
+is_bif(erlang, '=:=', 2) -> true;
+is_bif(erlang, '=<', 2) -> true;
+is_bif(erlang, '==', 2) -> true;
+is_bif(erlang, '>', 2) -> true;
+is_bif(erlang, '>=', 2) -> true;
+is_bif(erlang, 'and', 2) -> true;
+is_bif(erlang, 'band', 2) -> true;
+is_bif(erlang, 'bnot', 1) -> true;
+is_bif(erlang, 'bor', 2) -> true;
+is_bif(erlang, 'bsl', 2) -> true;
+is_bif(erlang, 'bsr', 2) -> true;
+is_bif(erlang, 'bxor', 2) -> true;
+is_bif(erlang, 'div', 2) -> true;
+is_bif(erlang, 'not', 1) -> true;
+is_bif(erlang, 'or', 2) -> true;
+is_bif(erlang, 'rem', 2) -> true;
+is_bif(erlang, 'xor', 2) -> true;
+is_bif(erlang, abs, 1) -> true;
+is_bif(erlang, append_element, 2) -> true;
+is_bif(erlang, apply, 2) -> true;
+is_bif(erlang, apply, 3) -> true;
+is_bif(erlang, atom_to_list, 1) -> true;
+is_bif(erlang, binary_to_list, 1) -> true;
+is_bif(erlang, binary_to_list, 3) -> true;
+is_bif(erlang, binary_to_term, 1) -> true;
+is_bif(erlang, cancel_timer, 1) -> true;
+is_bif(erlang, concat_binary, 1) -> true;
+is_bif(erlang, date, 0) -> true;
+is_bif(erlang, demonitor, 1) -> true;
+is_bif(erlang, disconnect_node, 1) -> true;
+is_bif(erlang, display, 1) -> true;
+is_bif(erlang, element, 2) -> true;
+is_bif(erlang, erase, 0) -> true;
+is_bif(erlang, erase, 1) -> true;
+is_bif(erlang, error, 1) -> true;
+is_bif(erlang, error, 2) -> true;
+is_bif(erlang, exit, 1) -> true;
+is_bif(erlang, exit, 2) -> true;
+is_bif(erlang, fault, 1) -> true;
+is_bif(erlang, fault, 2) -> true;
+is_bif(erlang, float, 1) -> true;
+is_bif(erlang, float_to_list, 1) -> true;
+is_bif(erlang, fun_info, 1) -> true;
+is_bif(erlang, fun_info, 2) -> true;
+is_bif(erlang, fun_to_list, 1) -> true;
+is_bif(erlang, get, 0) -> true;
+is_bif(erlang, get, 1) -> true;
+is_bif(erlang, get_cookie, 0) -> true;
+is_bif(erlang, get_keys, 1) -> true;
+is_bif(erlang, group_leader, 0) -> true;
+is_bif(erlang, group_leader, 2) -> true;
+is_bif(erlang, halt, 0) -> false;
+is_bif(erlang, halt, 1) -> false;
+is_bif(erlang, hash, 2) -> false;
+is_bif(erlang, hd, 1) -> true;
+is_bif(erlang, info, 1) -> true;
+is_bif(erlang, integer_to_list, 1) -> true;
+is_bif(erlang, is_alive, 0) -> true;
+is_bif(erlang, is_atom, 1) -> true;
+is_bif(erlang, is_binary, 1) -> true;
+is_bif(erlang, is_boolean, 1) -> true;
+is_bif(erlang, is_builtin, 3) -> true;
+is_bif(erlang, is_constant, 1) -> true;
+is_bif(erlang, is_float, 1) -> true;
+is_bif(erlang, is_function, 1) -> true;
+is_bif(erlang, is_integer, 1) -> true;
+is_bif(erlang, is_list, 1) -> true;
+is_bif(erlang, is_number, 1) -> true;
+is_bif(erlang, is_pid, 1) -> true;
+is_bif(erlang, is_port, 1) -> true;
+is_bif(erlang, is_process_alive, 1) -> true;
+is_bif(erlang, is_record, 3) -> true;
+is_bif(erlang, is_reference, 1) -> true;
+is_bif(erlang, is_tuple, 1) -> true;
+is_bif(erlang, length, 1) -> true;
+is_bif(erlang, link, 1) -> true;
+is_bif(erlang, list_to_atom, 1) -> true;
+is_bif(erlang, list_to_binary, 1) -> true;
+is_bif(erlang, list_to_float, 1) -> true;
+is_bif(erlang, list_to_integer, 1) -> true;
+is_bif(erlang, list_to_pid, 1) -> true;
+is_bif(erlang, list_to_tuple, 1) -> true;
+is_bif(erlang, loaded, 0) -> true;
+is_bif(erlang, localtime, 0) -> true;
+is_bif(erlang, localtime_to_universaltime, 1) -> true;
+is_bif(erlang, make_ref, 0) -> true;
+is_bif(erlang, make_tuple, 2) -> true;
+is_bif(erlang, md5, 1) -> true;
+is_bif(erlang, md5_final, 1) -> true;
+is_bif(erlang, md5_init, 0) -> true;
+is_bif(erlang, md5_update, 2) -> true;
+is_bif(erlang, monitor, 2) -> true;
+is_bif(erlang, monitor_node, 2) -> true;
+is_bif(erlang, node, 0) -> true;
+is_bif(erlang, node, 1) -> true;
+is_bif(erlang, nodes, 0) -> true;
+is_bif(erlang, now, 0) -> true;
+is_bif(erlang, open_port, 2) -> true;
+is_bif(erlang, phash, 2) -> true;
+is_bif(erlang, pid_to_list, 1) -> true;
+is_bif(erlang, port_close, 2) -> true;
+is_bif(erlang, port_command, 2) -> true;
+is_bif(erlang, port_connect, 2) -> true;
+is_bif(erlang, port_control, 3) -> true;
+is_bif(erlang, port_info, 2) -> true;
+is_bif(erlang, port_to_list, 1) -> true;
+is_bif(erlang, ports, 0) -> true;
+is_bif(erlang, pre_loaded, 0) -> true;
+is_bif(erlang, process_display, 2) -> true;
+is_bif(erlang, process_flag, 2) -> true;
+is_bif(erlang, process_flag, 3) -> true;
+is_bif(erlang, process_info, 1) -> true;
+is_bif(erlang, process_info, 2) -> true;
+is_bif(erlang, processes, 0) -> true;
+is_bif(erlang, put, 2) -> true;
+is_bif(erlang, read_timer, 1) -> true;
+is_bif(erlang, ref_to_list, 1) -> true;
+is_bif(erlang, register, 2) -> true;
+is_bif(erlang, registered, 0) -> true;
+is_bif(erlang, resume_process, 1) -> true;
+is_bif(erlang, round, 1) -> true;
+is_bif(erlang, self, 0) -> true;
+is_bif(erlang, send_after, 3) -> true;
+is_bif(erlang, set_cookie, 2) -> true;
+is_bif(erlang, setelement, 3) -> true;
+is_bif(erlang, size, 1) -> true;
+is_bif(erlang, spawn, 1) -> true;
+is_bif(erlang, spawn, 2) -> true;
+is_bif(erlang, spawn, 3) -> true;
+is_bif(erlang, spawn, 4) -> true;
+is_bif(erlang, spawn_link, 1) -> true;
+is_bif(erlang, spawn_link, 2) -> true;
+is_bif(erlang, spawn_link, 3) -> true;
+is_bif(erlang, spawn_link, 4) -> true;
+is_bif(erlang, spawn_opt, 4) -> true;
+is_bif(erlang, split_binary, 2) -> true;
+is_bif(erlang, start_timer, 3) -> true;
+is_bif(erlang, statistics, 1) -> true;
+is_bif(erlang, suspend_process, 1) -> true;
+is_bif(erlang, system_flag, 2) -> true;
+is_bif(erlang, system_info, 1) -> true;
+is_bif(erlang, term_to_binary, 1) -> true;
+is_bif(erlang, term_to_binary, 2) -> true;
+is_bif(erlang, throw, 1) -> true;
+is_bif(erlang, time, 0) -> true;
+is_bif(erlang, tl, 1) -> true;
+is_bif(erlang, trace, 3) -> true;
+is_bif(erlang, trace_info, 2) -> true;
+is_bif(erlang, trace_pattern, 2) -> true;
+is_bif(erlang, trace_pattern, 3) -> true;
+is_bif(erlang, trunc, 1) -> true;
+is_bif(erlang, tuple_to_list, 1) -> true;
+is_bif(erlang, universaltime, 0) -> true;
+is_bif(erlang, universaltime_to_localtime, 1) -> true;
+is_bif(erlang, unlink, 1) -> true;
+is_bif(erlang, unregister, 1) -> true;
+is_bif(erlang, whereis, 1) -> true;
+is_bif(erlang, yield, 0) -> true;
+is_bif(lists, append, 2) -> true;
+is_bif(lists, reverse, 1) -> true;
+is_bif(lists, reverse, 2) -> true;
+is_bif(lists, subtract, 2) -> true;
+is_bif(math, acos, 1) -> true;
+is_bif(math, acosh, 1) -> true;
+is_bif(math, asin, 1) -> true;
+is_bif(math, asinh, 1) -> true;
+is_bif(math, atan, 1) -> true;
+is_bif(math, atan2, 2) -> true;
+is_bif(math, atanh, 1) -> true;
+is_bif(math, cos, 1) -> true;
+is_bif(math, cosh, 1) -> true;
+is_bif(math, erf, 1) -> true;
+is_bif(math, erfc, 1) -> true;
+is_bif(math, exp, 1) -> true;
+is_bif(math, log, 1) -> true;
+is_bif(math, log10, 1) -> true;
+is_bif(math, pow, 2) -> true;
+is_bif(math, sin, 1) -> true;
+is_bif(math, sinh, 1) -> true;
+is_bif(math, sqrt, 1) -> true;
+is_bif(math, tan, 1) -> true;
+is_bif(math, tanh, 1) -> true;
+is_bif(_, _, _) -> false.
+
+
+%% =====================================================================
+%% is_guard_bif(Module, Name, Arity) -> boolean()
+%%
+%% Module = Name = atom()
+%% Arity = integer()
+%%
+%% Returns `true' if the built-in function `Module:Name/Arity' may
+%% be called from a clause guard. Note that such "guard BIFs" are
+%% not necessarily "pure", since some (notably `erlang:self/0') may
+%% depend on the current state, nor "safe", since many guard BIFs
+%% can fail. Also note that even a "pure" function could be
+%% unsuitable for calling from a guard because of its time or space
+%% complexity.
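+%%
+%% For example, erl_bifs:is_guard_bif(erlang, element, 2) returns `true',
+%% but erl_bifs:is_guard_bif(erlang, atom_to_list, 1) returns `false',
+%% even though atom_to_list/1 is a BIF.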
+
+is_guard_bif(erlang, '*', 2) -> true;
+is_guard_bif(erlang, '+', 1) -> true;
+is_guard_bif(erlang, '+', 2) -> true;
+is_guard_bif(erlang, '-', 1) -> true;
+is_guard_bif(erlang, '-', 2) -> true;
+is_guard_bif(erlang, '/', 2) -> true;
+is_guard_bif(erlang, '/=', 2) -> true;
+is_guard_bif(erlang, '<', 2) -> true;
+is_guard_bif(erlang, '=/=', 2) -> true;
+is_guard_bif(erlang, '=:=', 2) -> true;
+is_guard_bif(erlang, '=<', 2) -> true;
+is_guard_bif(erlang, '==', 2) -> true;
+is_guard_bif(erlang, '>', 2) -> true;
+is_guard_bif(erlang, '>=', 2) -> true;
+is_guard_bif(erlang, 'and', 2) -> true;
+is_guard_bif(erlang, 'band', 2) -> true;
+is_guard_bif(erlang, 'bnot', 1) -> true;
+is_guard_bif(erlang, 'bor', 2) -> true;
+is_guard_bif(erlang, 'bsl', 2) -> true;
+is_guard_bif(erlang, 'bsr', 2) -> true;
+is_guard_bif(erlang, 'bxor', 2) -> true;
+is_guard_bif(erlang, 'div', 2) -> true;
+is_guard_bif(erlang, 'not', 1) -> true;
+is_guard_bif(erlang, 'or', 2) -> true;
+is_guard_bif(erlang, 'rem', 2) -> true;
+is_guard_bif(erlang, 'xor', 2) -> true;
+is_guard_bif(erlang, abs, 1) -> true;
+is_guard_bif(erlang, element, 2) -> true;
+is_guard_bif(erlang, error, 1) -> true; % unorthodox
+is_guard_bif(erlang, exit, 1) -> true; % unorthodox
+is_guard_bif(erlang, fault, 1) -> true; % unorthodox
+is_guard_bif(erlang, float, 1) -> true; % (the type coercion function)
+is_guard_bif(erlang, hd, 1) -> true;
+is_guard_bif(erlang, is_atom, 1) -> true;
+is_guard_bif(erlang, is_boolean, 1) -> true;
+is_guard_bif(erlang, is_binary, 1) -> true;
+is_guard_bif(erlang, is_constant, 1) -> true;
+is_guard_bif(erlang, is_float, 1) -> true;
+is_guard_bif(erlang, is_function, 1) -> true;
+is_guard_bif(erlang, is_integer, 1) -> true;
+is_guard_bif(erlang, is_list, 1) -> true;
+is_guard_bif(erlang, is_number, 1) -> true;
+is_guard_bif(erlang, is_pid, 1) -> true;
+is_guard_bif(erlang, is_port, 1) -> true;
+is_guard_bif(erlang, is_reference, 1) -> true;
+is_guard_bif(erlang, is_tuple, 1) -> true;
+is_guard_bif(erlang, length, 1) -> true;
+is_guard_bif(erlang, list_to_atom, 1) -> true; % unorthodox
+is_guard_bif(erlang, node, 0) -> true; % (not pure)
+is_guard_bif(erlang, node, 1) -> true; % (not pure)
+is_guard_bif(erlang, round, 1) -> true;
+is_guard_bif(erlang, self, 0) -> true; % (not pure)
+is_guard_bif(erlang, size, 1) -> true;
+is_guard_bif(erlang, throw, 1) -> true; % unorthodox
+is_guard_bif(erlang, tl, 1) -> true;
+is_guard_bif(erlang, trunc, 1) -> true;
+is_guard_bif(math, acos, 1) -> true; % unorthodox
+is_guard_bif(math, acosh, 1) -> true; % unorthodox
+is_guard_bif(math, asin, 1) -> true; % unorthodox
+is_guard_bif(math, asinh, 1) -> true; % unorthodox
+is_guard_bif(math, atan, 1) -> true; % unorthodox
+is_guard_bif(math, atan2, 2) -> true; % unorthodox
+is_guard_bif(math, atanh, 1) -> true; % unorthodox
+is_guard_bif(math, cos, 1) -> true; % unorthodox
+is_guard_bif(math, cosh, 1) -> true; % unorthodox
+is_guard_bif(math, erf, 1) -> true; % unorthodox
+is_guard_bif(math, erfc, 1) -> true; % unorthodox
+is_guard_bif(math, exp, 1) -> true; % unorthodox
+is_guard_bif(math, log, 1) -> true; % unorthodox
+is_guard_bif(math, log10, 1) -> true; % unorthodox
+is_guard_bif(math, pow, 2) -> true; % unorthodox
+is_guard_bif(math, sin, 1) -> true; % unorthodox
+is_guard_bif(math, sinh, 1) -> true; % unorthodox
+is_guard_bif(math, sqrt, 1) -> true; % unorthodox
+is_guard_bif(math, tan, 1) -> true; % unorthodox
+is_guard_bif(math, tanh, 1) -> true; % unorthodox
+is_guard_bif(_, _, _) -> false.
+
+
+%% =====================================================================
+%% is_pure(Module, Name, Arity) -> boolean()
+%%
+%% Module = Name = atom()
+%% Arity = integer()
+%%
+%% Returns `true' if the function `Module:Name/Arity' does not
+%% affect the state, nor depend on the state, although its
+%% evaluation is not guaranteed to complete normally for all input.
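+%%
+%% For example, erl_bifs:is_pure(erlang, length, 1) returns `true' (it may
+%% fail on a non-list, but has no side effects), whereas
+%% erl_bifs:is_pure(erlang, self, 0) returns `false', since self/0 depends
+%% on the current process.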
+
+is_pure(erlang, '*', 2) -> true;
+is_pure(erlang, '+', 1) -> true; % (even for non-numbers)
+is_pure(erlang, '+', 2) -> true;
+is_pure(erlang, '++', 2) -> true;
+is_pure(erlang, '-', 1) -> true;
+is_pure(erlang, '-', 2) -> true;
+is_pure(erlang, '--', 2) -> true;
+is_pure(erlang, '/', 2) -> true;
+is_pure(erlang, '/=', 2) -> true;
+is_pure(erlang, '<', 2) -> true;
+is_pure(erlang, '=/=', 2) -> true;
+is_pure(erlang, '=:=', 2) -> true;
+is_pure(erlang, '=<', 2) -> true;
+is_pure(erlang, '==', 2) -> true;
+is_pure(erlang, '>', 2) -> true;
+is_pure(erlang, '>=', 2) -> true;
+is_pure(erlang, 'and', 2) -> true;
+is_pure(erlang, 'band', 2) -> true;
+is_pure(erlang, 'bnot', 1) -> true;
+is_pure(erlang, 'bor', 2) -> true;
+is_pure(erlang, 'bsl', 2) -> true;
+is_pure(erlang, 'bsr', 2) -> true;
+is_pure(erlang, 'bxor', 2) -> true;
+is_pure(erlang, 'div', 2) -> true;
+is_pure(erlang, 'not', 1) -> true;
+is_pure(erlang, 'or', 2) -> true;
+is_pure(erlang, 'rem', 2) -> true;
+is_pure(erlang, 'xor', 2) -> true;
+is_pure(erlang, abs, 1) -> true;
+is_pure(erlang, atom_to_list, 1) -> true;
+is_pure(erlang, binary_to_list, 1) -> true;
+is_pure(erlang, binary_to_list, 3) -> true;
+is_pure(erlang, concat_binary, 1) -> true;
+is_pure(erlang, element, 2) -> true;
+is_pure(erlang, float, 1) -> true;
+is_pure(erlang, float_to_list, 1) -> true;
+is_pure(erlang, hash, 2) -> false;
+is_pure(erlang, hd, 1) -> true;
+is_pure(erlang, integer_to_list, 1) -> true;
+is_pure(erlang, is_atom, 1) -> true;
+is_pure(erlang, is_boolean, 1) -> true;
+is_pure(erlang, is_binary, 1) -> true;
+is_pure(erlang, is_builtin, 3) -> true;
+is_pure(erlang, is_constant, 1) -> true;
+is_pure(erlang, is_float, 1) -> true;
+is_pure(erlang, is_function, 1) -> true;
+is_pure(erlang, is_integer, 1) -> true;
+is_pure(erlang, is_list, 1) -> true;
+is_pure(erlang, is_number, 1) -> true;
+is_pure(erlang, is_pid, 1) -> true;
+is_pure(erlang, is_port, 1) -> true;
+is_pure(erlang, is_record, 3) -> true;
+is_pure(erlang, is_reference, 1) -> true;
+is_pure(erlang, is_tuple, 1) -> true;
+is_pure(erlang, length, 1) -> true;
+is_pure(erlang, list_to_atom, 1) -> true;
+is_pure(erlang, list_to_binary, 1) -> true;
+is_pure(erlang, list_to_float, 1) -> true;
+is_pure(erlang, list_to_integer, 1) -> true;
+is_pure(erlang, list_to_pid, 1) -> true;
+is_pure(erlang, list_to_tuple, 1) -> true;
+is_pure(erlang, phash, 2) -> false;
+is_pure(erlang, pid_to_list, 1) -> true;
+is_pure(erlang, round, 1) -> true;
+is_pure(erlang, setelement, 3) -> true;
+is_pure(erlang, size, 1) -> true;
+is_pure(erlang, split_binary, 2) -> true;
+is_pure(erlang, term_to_binary, 1) -> true;
+is_pure(erlang, tl, 1) -> true;
+is_pure(erlang, trunc, 1) -> true;
+is_pure(erlang, tuple_to_list, 1) -> true;
+is_pure(lists, append, 2) -> true;
+is_pure(lists, subtract, 2) -> true;
+is_pure(math, acos, 1) -> true;
+is_pure(math, acosh, 1) -> true;
+is_pure(math, asin, 1) -> true;
+is_pure(math, asinh, 1) -> true;
+is_pure(math, atan, 1) -> true;
+is_pure(math, atan2, 2) -> true;
+is_pure(math, atanh, 1) -> true;
+is_pure(math, cos, 1) -> true;
+is_pure(math, cosh, 1) -> true;
+is_pure(math, erf, 1) -> true;
+is_pure(math, erfc, 1) -> true;
+is_pure(math, exp, 1) -> true;
+is_pure(math, log, 1) -> true;
+is_pure(math, log10, 1) -> true;
+is_pure(math, pow, 2) -> true;
+is_pure(math, sin, 1) -> true;
+is_pure(math, sinh, 1) -> true;
+is_pure(math, sqrt, 1) -> true;
+is_pure(math, tan, 1) -> true;
+is_pure(math, tanh, 1) -> true;
+is_pure(_, _, _) -> false.
+
+
+%% =====================================================================
+%% is_safe(Module, Name, Arity) -> boolean()
+%%
+%% Module = Name = atom()
+%% Arity = integer()
+%%
+%% Returns `true' if the function `Module:Name/Arity' is completely
+%% effect free, i.e., if its evaluation always completes normally
+%% and does not affect the state (although the value it returns
+%% might depend on the state).
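+%%
+%% For example, erl_bifs:is_safe(erlang, self, 0) returns `true' (it never
+%% fails and has no effect, although its value depends on the state), while
+%% erl_bifs:is_safe(erlang, hd, 1) returns `false', since hd/1 fails for
+%% non-list arguments.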
+
+is_safe(erlang, '/=', 2) -> true;
+is_safe(erlang, '<', 2) -> true;
+is_safe(erlang, '=/=', 2) -> true;
+is_safe(erlang, '=:=', 2) -> true;
+is_safe(erlang, '=<', 2) -> true;
+is_safe(erlang, '==', 2) -> true;
+is_safe(erlang, '>', 2) -> true;
+is_safe(erlang, '>=', 2) -> true;
+is_safe(erlang, date, 0) -> true;
+is_safe(erlang, get, 0) -> true;
+is_safe(erlang, get, 1) -> true;
+is_safe(erlang, get_cookie, 0) -> true;
+is_safe(erlang, get_keys, 1) -> true;
+is_safe(erlang, group_leader, 0) -> true;
+is_safe(erlang, is_alive, 0) -> true;
+is_safe(erlang, is_atom, 1) -> true;
+is_safe(erlang, is_boolean, 1) -> true;
+is_safe(erlang, is_binary, 1) -> true;
+is_safe(erlang, is_constant, 1) -> true;
+is_safe(erlang, is_float, 1) -> true;
+is_safe(erlang, is_function, 1) -> true;
+is_safe(erlang, is_integer, 1) -> true;
+is_safe(erlang, is_list, 1) -> true;
+is_safe(erlang, is_number, 1) -> true;
+is_safe(erlang, is_pid, 1) -> true;
+is_safe(erlang, is_port, 1) -> true;
+is_safe(erlang, is_record, 3) -> true;
+is_safe(erlang, is_reference, 1) -> true;
+is_safe(erlang, is_tuple, 1) -> true;
+is_safe(erlang, make_ref, 0) -> true;
+is_safe(erlang, node, 0) -> true;
+is_safe(erlang, nodes, 0) -> true;
+is_safe(erlang, ports, 0) -> true;
+is_safe(erlang, pre_loaded, 0) -> true;
+is_safe(erlang, processes, 0) -> true;
+is_safe(erlang, registered, 0) -> true;
+is_safe(erlang, self, 0) -> true;
+is_safe(erlang, term_to_binary, 1) -> true;
+is_safe(erlang, time, 0) -> true;
+is_safe(_, _, _) -> false.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/rec_env.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/rec_env.erl
new file mode 100644
index 0000000000..0dd31b71ea
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/rec_env.erl
@@ -0,0 +1,611 @@
+%% =====================================================================
+%% This library is free software; you can redistribute it and/or modify
+%% it under the terms of the GNU Lesser General Public License as
+%% published by the Free Software Foundation; either version 2 of the
+%% License, or (at your option) any later version.
+%%
+%% This library is distributed in the hope that it will be useful, but
+%% WITHOUT ANY WARRANTY; without even the implied warranty of
+%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+%% Lesser General Public License for more details.
+%%
+%% You should have received a copy of the GNU Lesser General Public
+%% License along with this library; if not, write to the Free Software
+%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
+%% USA
+%%
+%% $Id: rec_env.erl,v 1.2 2009/09/17 09:46:19 kostis Exp $
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 1999-2004 Richard Carlsson
+%% @doc Abstract environments, supporting self-referential bindings and
+%% automatic new-key generation.
+
+%% The current implementation is based on Erlang standard library
+%% dictionaries.
+
+%%% -define(DEBUG, true).
+
+-module(rec_env).
+
+-export([bind/3, bind_list/3, bind_recursive/4, delete/2, empty/0,
+ get/2, is_defined/2, is_empty/1, keys/1, lookup/2, new_key/1,
+ new_key/2, new_keys/2, new_keys/3, size/1, to_list/1]).
+
+-ifdef(DEBUG).
+-export([test/1, test_custom/1, test_custom/2]).
+-endif.
+
+-ifdef(DEBUG).
+%% Code for testing:
+%%@hidden
+test(N) ->
+ test_0(integer, N).
+
+%%@hidden
+test_custom(N) ->
+ F = fun (X) -> list_to_atom("X"++integer_to_list(X)) end,
+ test_custom(F, N).
+
+%%@hidden
+test_custom(F, N) ->
+ test_0({custom, F}, N).
+
+test_0(Type, N) ->
+ put(new_key_calls, 0),
+ put(new_key_retries, 0),
+ put(new_key_max, 0),
+ Env = test_1(Type, N, empty()),
+ io:fwrite("\ncalls: ~w.\n", [get(new_key_calls)]),
+ io:fwrite("\nretries: ~w.\n", [get(new_key_retries)]),
+ io:fwrite("\nmax: ~w.\n", [get(new_key_max)]),
+ dict:to_list(element(1,Env)).
+
+test_1(integer = Type, N, Env) when integer(N), N > 0 ->
+ Key = new_key(Env),
+ test_1(Type, N - 1, bind(Key, value, Env));
+test_1({custom, F} = Type, N, Env) when integer(N), N > 0 ->
+ Key = new_key(F, Env),
+ test_1(Type, N - 1, bind(Key, value, Env));
+test_1(_,0, Env) ->
+ Env.
+-endif.
+
+
+%% Representation:
+%%
+%% environment() = [Mapping]
+%%
+%% Mapping = {map, Dict} | {rec, Dict, Dict}
+%% Dict = dict:dictionary()
+%%
+%% An empty environment is a list containing a single `{map, Dict}'
+%% element - empty lists are not valid environments. To find a key in an
+%% environment, it is searched for in each mapping in the list, in
+%% order, until the key is found in some mapping, or the end of the
+%% list is reached. In a 'rec' mapping, we keep the original dictionary
+%% together with a version where entries may have been deleted - this
+%% makes it possible to garbage collect the entire 'rec' mapping when
+%% all its entries are unused (for example, by being shadowed by later
+%% definitions).
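+%%
+%% For example (illustrative only): empty() is [{map, EmptyDict}]; binding
+%% a key into it yields [{map, DictWithKey}]; and bind_recursive/4 pushes a
+%% new {rec, Dict, Dict} mapping on top of the existing list.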
+
+
+
+%% =====================================================================
+%% @type environment(). An abstract environment.
+
+
+%% =====================================================================
+%% @spec empty() -> environment()
+%%
+%% @doc Returns an empty environment.
+
+empty() ->
+ [{map, dict:new()}].
+
+
+%% =====================================================================
+%% @spec is_empty(Env::environment()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if the environment is empty, otherwise
+%% <code>false</code>.
+
+is_empty([{map, Dict} | Es]) ->
+ N = dict:size(Dict),
+ if N /= 0 -> false;
+ Es == [] -> true;
+ true -> is_empty(Es)
+ end;
+is_empty([{rec, Dict, _} | Es]) ->
+ N = dict:size(Dict),
+ if N /= 0 -> false;
+ Es == [] -> true;
+ true -> is_empty(Es)
+ end.
+
+
+%% =====================================================================
+%% @spec size(Env::environment()) -> integer()
+%%
+%% @doc Returns the number of entries in an environment.
+
+%% (The name 'size' cannot be used in local calls, since there exists a
+%% built-in function with the same name.)
+
+size(Env) ->
+ env_size(Env).
+
+env_size([{map, Dict}]) ->
+ dict:size(Dict);
+env_size([{map, Dict} | Env]) ->
+ dict:size(Dict) + env_size(Env);
+env_size([{rec, Dict, _Dict0} | Env]) ->
+ dict:size(Dict) + env_size(Env).
+
+
+%% =====================================================================
+%% @spec is_defined(Key, Env) -> boolean()
+%%
+%% Key = term()
+%% Env = environment()
+%%
+%% @doc Returns <code>true</code> if <code>Key</code> is bound in the
+%% environment, otherwise <code>false</code>.
+
+is_defined(Key, [{map, Dict} | Env]) ->
+ case dict:is_key(Key, Dict) of
+ true ->
+ true;
+ false when Env == [] ->
+ false;
+ false ->
+ is_defined(Key, Env)
+ end;
+is_defined(Key, [{rec, Dict, _Dict0} | Env]) ->
+ case dict:is_key(Key, Dict) of
+ true ->
+ true;
+ false ->
+ is_defined(Key, Env)
+ end.
+
+
+%% =====================================================================
+%% @spec keys(Env::environment()) -> [term()]
+%%
+%% @doc Returns the ordered list of all keys in the environment.
+
+keys(Env) ->
+ lists:sort(keys(Env, [])).
+
+keys([{map, Dict}], S) ->
+ dict:fetch_keys(Dict) ++ S;
+keys([{map, Dict} | Env], S) ->
+ keys(Env, dict:fetch_keys(Dict) ++ S);
+keys([{rec, Dict, _Dict0} | Env], S) ->
+ keys(Env, dict:fetch_keys(Dict) ++ S).
+
+
+%% =====================================================================
+%% @spec to_list(Env) -> [{Key, Value}]
+%%
+%% Env = environment()
+%% Key = term()
+%% Value = term()
+%%
+%% @doc Returns an ordered list of <code>{Key, Value}</code> pairs for
+%% all keys in <code>Env</code>. <code>Value</code> is the same as that
+%% returned by {@link get/2}.
+
+to_list(Env) ->
+ lists:sort(to_list(Env, [])).
+
+to_list([{map, Dict}], S) ->
+ dict:to_list(Dict) ++ S;
+to_list([{map, Dict} | Env], S) ->
+ to_list(Env, dict:to_list(Dict) ++ S);
+to_list([{rec, Dict, _Dict0} | Env], S) ->
+ to_list(Env, dict:to_list(Dict) ++ S).
+
+
+%% =====================================================================
+%% @spec bind(Key, Value, Env) -> environment()
+%%
+%% Key = term()
+%% Value = term()
+%% Env = environment()
+%%
+%% @doc Make a nonrecursive entry. This binds <code>Key</code> to
+%% <code>Value</code>. If the key already existed in the environment,
+%% the old entry is replaced.
+
+%% Note that deletion is done to free old bindings so they can be
+%% garbage collected.
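+
+%% For example (a sketch): Env1 = rec_env:bind(x, 1, rec_env:empty())
+%% gives an environment where rec_env:lookup(x, Env1) returns {ok, 1};
+%% rebinding with rec_env:bind(x, 2, Env1) simply replaces the old entry.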
+
+bind(Key, Value, [{map, Dict}]) ->
+ [{map, dict:store(Key, Value, Dict)}];
+bind(Key, Value, [{map, Dict} | Env]) ->
+ [{map, dict:store(Key, Value, Dict)} | delete_any(Key, Env)];
+bind(Key, Value, Env) ->
+ [{map, dict:store(Key, Value, dict:new())} | delete_any(Key, Env)].
+
+
+%% =====================================================================
+%% @spec bind_list(Keys, Values, Env) -> environment()
+%%
+%% Keys = [term()]
+%% Values = [term()]
+%% Env = environment()
+%%
+%% @doc Make N nonrecursive entries. This binds each key in
+%% <code>Keys</code> to the corresponding value in
+%% <code>Values</code>. If some key already existed in the environment,
+%% the previous entry is replaced. If <code>Keys</code> does not have
+%% the same length as <code>Values</code>, an exception is generated.
+
+bind_list(Ks, Vs, [{map, Dict}]) ->
+ [{map, store_list(Ks, Vs, Dict)}];
+bind_list(Ks, Vs, [{map, Dict} | Env]) ->
+ [{map, store_list(Ks, Vs, Dict)} | delete_list(Ks, Env)];
+bind_list(Ks, Vs, Env) ->
+ [{map, store_list(Ks, Vs, dict:new())} | delete_list(Ks, Env)].
+
+store_list([K | Ks], [V | Vs], Dict) ->
+ store_list(Ks, Vs, dict:store(K, V, Dict));
+store_list([], _, Dict) ->
+ Dict.
+
+delete_list([K | Ks], Env) ->
+ delete_list(Ks, delete_any(K, Env));
+delete_list([], Env) ->
+ Env.
+
+%% By not calling `delete' unless we have to, we avoid unnecessary
+%% rewriting of the data.
+
+delete_any(Key, Env) ->
+ case is_defined(Key, Env) of
+ true ->
+ delete(Key, Env);
+ false ->
+ Env
+ end.
+
+%% =====================================================================
+%% @spec delete(Key, Env) -> environment()
+%%
+%% Key = term()
+%% Env = environment()
+%%
+%% @doc Delete an entry. This removes <code>Key</code> from the
+%% environment.
+
+delete(Key, [{map, Dict} = E | Env]) ->
+ case dict:is_key(Key, Dict) of
+ true ->
+ [{map, dict:erase(Key, Dict)} | Env];
+ false ->
+ delete_1(Key, Env, E)
+ end;
+delete(Key, [{rec, Dict, Dict0} = E | Env]) ->
+ case dict:is_key(Key, Dict) of
+ true ->
+ %% The Dict0 component must be preserved as it is until all
+ %% keys in Dict have been deleted.
+ Dict1 = dict:erase(Key, Dict),
+ case dict:size(Dict1) of
+ 0 ->
+ Env; % the whole {rec,...} is now garbage
+ _ ->
+ [{rec, Dict1, Dict0} | Env]
+ end;
+ false ->
+ [E | delete(Key, Env)]
+ end.
+
+%% This is just like above, except we pass on the preceding 'map'
+%% mapping in the list to enable merging when removing 'rec' mappings.
+
+delete_1(Key, [{rec, Dict, Dict0} = E | Env], E1) ->
+ case dict:is_key(Key, Dict) of
+ true ->
+ Dict1 = dict:erase(Key, Dict),
+ case dict:size(Dict1) of
+ 0 ->
+ concat(E1, Env);
+ _ ->
+ [E1, {rec, Dict1, Dict0} | Env]
+ end;
+ false ->
+ [E1, E | delete(Key, Env)]
+ end.
+
+concat({map, D1}, [{map, D2} | Env]) ->
+ [dict:merge(fun (_K, V1, _V2) -> V1 end, D1, D2) | Env];
+concat(E1, Env) ->
+ [E1 | Env].
+
+
+%% =====================================================================
+%% @spec bind_recursive(Keys, Values, Fun, Env) -> NewEnv
+%%
+%% Keys = [term()]
+%% Values = [term()]
+%% Fun = (Value, Env) -> term()
+%% Env = environment()
+%% NewEnv = environment()
+%%
+%% @doc Make N recursive entries. This binds each key in
+%% <code>Keys</code> to the value of <code>Fun(Value, NewEnv)</code> for
+%% the corresponding <code>Value</code>. If <code>Keys</code> does not
+%% have the same length as <code>Values</code>, an exception is
+%% generated. If some key already existed in the environment, the old
+%% entry is replaced.
+%%
+%% <p>Note: the function <code>Fun</code> is evaluated each time one of
+%% the stored keys is looked up, but only then.</p>
+%%
+%% <p>Examples:
+%%<pre>
+%% NewEnv = bind_recursive([foo, bar], [1, 2],
+%% fun (V, E) -> V end,
+%% Env)</pre>
+%%
+%% This does nothing interesting; <code>get(foo, NewEnv)</code> yields
+%% <code>1</code> and <code>get(bar, NewEnv)</code> yields
+%% <code>2</code>, but there is more overhead than if the {@link
+%% bind_list/3} function had been used.
+%%
+%% <pre>
+%% NewEnv = bind_recursive([foo, bar], [1, 2],
+%% fun (V, E) -> {V, E} end,
+%% Env)</pre>
+%%
+%% Here, however, <code>get(foo, NewEnv)</code> will yield <code>{1,
+%% NewEnv}</code> and <code>get(bar, NewEnv)</code> will yield <code>{2,
+%% NewEnv}</code>, i.e., the environment <code>NewEnv</code> contains
+%% recursive bindings.</p>
+
+bind_recursive([], [], _, Env) ->
+ Env;
+bind_recursive(Ks, Vs, F, Env) ->
+ F1 = fun (V) ->
+ fun (Dict) -> F(V, [{rec, Dict, Dict} | Env]) end
+ end,
+ Dict = bind_recursive_1(Ks, Vs, F1, dict:new()),
+ [{rec, Dict, Dict} | Env].
+
+bind_recursive_1([K | Ks], [V | Vs], F, Dict) ->
+ bind_recursive_1(Ks, Vs, F, dict:store(K, F(V), Dict));
+bind_recursive_1([], [], _, Dict) ->
+ Dict.
+
+
+%% =====================================================================
+%% @spec lookup(Key, Env) -> error | {ok, Value}
+%%
+%% Key = term()
+%% Env = environment()
+%% Value = term()
+%%
+%% @doc Returns <code>{ok, Value}</code> if <code>Key</code> is bound to
+%% <code>Value</code> in <code>Env</code>, and <code>error</code>
+%% otherwise.
+
+lookup(Key, [{map, Dict} | Env]) ->
+ case dict:find(Key, Dict) of
+ {ok, _}=Value ->
+ Value;
+ error when Env == [] ->
+ error;
+ error ->
+ lookup(Key, Env)
+ end;
+lookup(Key, [{rec, Dict, Dict0} | Env]) ->
+ case dict:find(Key, Dict) of
+ {ok, F} ->
+ {ok, F(Dict0)};
+ error ->
+ lookup(Key, Env)
+ end.
+
+
+%% =====================================================================
+%% @spec get(Key, Env) -> Value
+%%
+%% Key = term()
+%% Env = environment()
+%% Value = term()
+%%
+%% @doc Returns the value that <code>Key</code> is bound to in
+%% <code>Env</code>. Throws <code>{undefined, Key}</code> if the key
+%% does not exist in <code>Env</code>.
+
+get(Key, Env) ->
+ case lookup(Key, Env) of
+ {ok, Value} -> Value;
+ error -> throw({undefined, Key})
+ end.
+
+
+%% =====================================================================
+%% The key-generating algorithm could possibly be further improved. The
+%% important thing to keep in mind is, that when we need a new key, we
+%% are generally in mid-traversal of a syntax tree, and existing names
+%% in the tree may be closely grouped and evenly distributed or even
+%% forming a compact range (often having been generated by a "gensym",
+%% or by this very algorithm itself). This means that if we generate an
+%% identifier whose value is too close to those already seen (i.e.,
+%% which are in the environment), it is very probable that we will
+%% shadow a not-yet-seen identifier further down in the tree, the result
+%% being that we induce another later renaming, and end up renaming most
+%% of the identifiers, completely contrary to our intention. We need to
+%% generate new identifiers in a way that avoids such systematic
+%% collisions.
+%%
+%% One way of getting a new key to try when the previous attempt failed
+%% is of course to e.g. add one to the last tried value. However, in
+%% general it's a bad idea to try adjacent identifiers: the percentage
+%% of retries will typically increase a lot, so you may lose big on the
+%% extra lookups while gaining only a little from the quicker
+%% computation.
+%%
+%% We want an initial range that is large enough for most typical cases.
+%% If we start with, say, a range of 10, we might quickly use up most of
+%% the values in the range 1-10 (or 1-100) for new top-level variables -
+%% but as we start traversing the syntax tree, it is quite likely that
+%% exactly those variables will be encountered again (this depends on
+%% how the names in the tree were created), and will then need to be
+%% renamed. If we instead begin with a larger range, it is less likely
+%% that any top-level names that we introduce will shadow names that we
+%% will find in the tree. Of course we cannot know how large is large
+%% enough: for any initial range, there is some syntax tree that uses
+%% all the values in that range, and thus any top-level names introduced
+%% will shadow names in the tree. The point is to avoid this happening
+%% all the time - a range of about 1000 seems enough for most programs.
+%%
+%% The following values have been shown to work well:
+
+-define(MINIMUM_RANGE, 1000).
+-define(START_RANGE_FACTOR, 50).
+-define(MAX_RETRIES, 2). % retries before enlarging range
+-define(ENLARGE_FACTOR, 10).  % range enlargement factor
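+
+%% A worked illustration of the constants above: for an environment
+%% with 100 entries the initial range is max(100 * 50, 1000) = 5000,
+%% so new keys are drawn from 1..5000; if the attempts in that range
+%% keep clashing, the range is multiplied by 10 (to 50000), and so on.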
+
+-ifdef(DEBUG).
+%% If you want to use these process dictionary counters, make sure to
+%% initialise them to zero before you call any of the key-generating
+%% functions.
+%%
+%% new_key_calls total number of calls
+%% new_key_retries failed key generation attempts
+%% new_key_max maximum generated integer value
+%%
+-define(measure_calls(),
+ put(new_key_calls, 1 + get(new_key_calls))).
+-define(measure_max_key(N),
+ case N > get(new_key_max) of
+ true ->
+ put(new_key_max, N);
+ false ->
+ ok
+ end).
+-define(measure_retries(N),
+ put(new_key_retries, get(new_key_retries) + N)).
+-else.
+-define(measure_calls(), ok).
+-define(measure_max_key(N), ok).
+-define(measure_retries(N), ok).
+-endif.
+
+
+%% =====================================================================
+%% @spec new_key(Env::environment()) -> integer()
+%%
+%% @doc Returns an integer which is not already used as key in the
+%% environment. New integers are generated using an algorithm which
+%% tries to keep the values randomly distributed within a reasonably
+%% small range relative to the number of entries in the environment.
+%%
+%% <p>This function uses the Erlang standard library module
+%% <code>random</code> to generate new keys.</p>
+%%
+%% <p>Note that only the new key is returned; the environment itself is
+%% not updated by this function.</p>
+
+new_key(Env) ->
+ new_key(fun (X) -> X end, Env).
+
+
+%% =====================================================================
+%% @spec new_key(Function, Env) -> term()
+%%
+%% Function = (integer()) -> term()
+%% Env = environment()
+%%
+%% @doc Returns a term which is not already used as key in the
+%% environment. The term is generated by applying <code>Function</code>
+%% to an integer generated as in {@link new_key/1}.
+%%
+%% <p>Note that only the generated term is returned; the environment
+%% itself is not updated by this function.</p>
+
+new_key(F, Env) ->
+ ?measure_calls(),
+ R = start_range(Env),
+%%% io:fwrite("Start range: ~w.\n", [R]),
+ new_key(R, F, Env).
+
+new_key(R, F, Env) ->
+ new_key(generate(R, R), R, 0, F, Env).
+
+new_key(N, R, T, F, Env) when T < ?MAX_RETRIES ->
+ A = F(N),
+ case is_defined(A, Env) of
+ true ->
+%%% io:fwrite("CLASH: ~w.\n", [A]),
+ new_key(generate(N, R), R, T + 1, F, Env);
+ false ->
+ ?measure_max_key(N),
+ ?measure_retries(T),
+%%% io:fwrite("New: ~w.\n", [N]),
+ A
+ end;
+new_key(N, R, _T, F, Env) ->
+ %% Too many retries - enlarge the range and start over.
+ ?measure_retries((_T + 1)),
+ R1 = trunc(R * ?ENLARGE_FACTOR),
+%%% io:fwrite("**NEW RANGE**: ~w.\n", [R1]),
+ new_key(generate(N, R1), R1, 0, F, Env).
+
+start_range(Env) ->
+ max(env_size(Env) * ?START_RANGE_FACTOR, ?MINIMUM_RANGE).
+
+max(X, Y) when X > Y -> X;
+max(_, Y) -> Y.
+
+%% The previous key might or might not be used to compute the next key
+%% to be tried. It is currently not used.
+%%
+%% In order to avoid causing cascading renamings, it is important that
+%% this function does not generate values in order, but
+%% (pseudo-)randomly distributed over the range.
+
+generate(_N, Range) ->
+ random:uniform(Range). % works well
+
+
+%% =====================================================================
+%% @spec new_keys(N, Env) -> [integer()]
+%%
+%% N = integer()
+%% Env = environment()
+%%
+%% @doc Returns a list of <code>N</code> distinct integers that are not
+%% already used as keys in the environment. See {@link new_key/1} for
+%% details.
+
+new_keys(N, Env) when integer(N) ->
+ new_keys(N, fun (X) -> X end, Env).
+
+
+%% =====================================================================
+%% @spec new_keys(N, Function, Env) -> [term()]
+%%
+%% N = integer()
+%% Function = (integer()) -> term()
+%% Env = environment()
+%%
+%% @doc Returns a list of <code>N</code> distinct terms that are not
+%% already used as keys in the environment. See {@link new_key/3} for
+%% details.
+
+new_keys(N, F, Env) when integer(N) ->
+ R = start_range(Env),
+ new_keys(N, [], R, F, Env).
+
+new_keys(N, Ks, R, F, Env) when N > 0 ->
+ Key = new_key(R, F, Env),
+ Env1 = bind(Key, true, Env), % dummy binding
+ new_keys(N - 1, [Key | Ks], R, F, Env1);
+new_keys(0, Ks, _, _, _) ->
+ Ks.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_expand_pmod.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_expand_pmod.erl
new file mode 100644
index 0000000000..c5052b0e51
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_expand_pmod.erl
@@ -0,0 +1,425 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: sys_expand_pmod.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+-module(sys_expand_pmod).
+
+%% Expand function definition forms of parameterized module. We assume
+%% all record definitions, imports, queries, etc., have been expanded
+%% away. Any calls on the form 'foo(...)' must be calls to local
+%% functions. Auto-generated functions (module_info,...) have not yet
+%% been added to the function definitions, but are listed in 'defined'
+%% and 'exports'. The 'new/N' function is neither added to the
+%% definitions nor to the 'exports'/'defines' lists yet.
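+%%
+%% As a rough illustration (assuming a parameterized module declared as
+%% -module(m, [P1, P2])): a clause f(X) -> P1 would be rewritten to
+%% f(X, {_, P1, P2} = THIS) -> P1, a local call g(X) to g(X, THIS),
+%% while the static new/2 and module_info/0,1 calls are left unchanged.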
+
+-export([forms/4]).
+
+-record(pmod, {parameters, exports, defined, predef}).
+
+%% TODO: more abstract handling of predefined/static functions.
+
+forms(Fs0, Ps, Es0, Ds0) ->
+ PreDef = [{module_info,0},{module_info,1}],
+ forms(Fs0, Ps, Es0, Ds0, PreDef).
+
+forms(Fs0, Ps, Es0, Ds0, PreDef) ->
+ St0 = #pmod{parameters=Ps,exports=Es0,defined=Ds0, predef=PreDef},
+ {Fs1, St1} = forms(Fs0, St0),
+ Es1 = update_function_names(Es0, St1),
+ Ds1 = update_function_names(Ds0, St1),
+ Fs2 = update_forms(Fs1, St1),
+ {Fs2,Es1,Ds1}.
+
+%% This is extremely simplistic for now; all functions get an extra
+%% parameter, whether they need it or not, except for static functions.
+
+update_function_names(Es, St) ->
+ [update_function_name(E, St) || E <- Es].
+
+update_function_name(E={F,A}, St) ->
+ case ordsets:is_element(E, St#pmod.predef) of
+ true -> E;
+ false -> {F, A + 1}
+ end.
+
+update_forms([{function,L,N,A,Cs}|Fs],St) ->
+ [{function,L,N,A+1,Cs}|update_forms(Fs,St)];
+update_forms([F|Fs],St) ->
+ [F|update_forms(Fs,St)];
+update_forms([],_St) ->
+ [].
+
+%% Process the program forms.
+
+forms([F0|Fs0],St0) ->
+ {F1,St1} = form(F0,St0),
+ {Fs1,St2} = forms(Fs0,St1),
+ {[F1|Fs1],St2};
+forms([], St0) ->
+ {[], St0}.
+
+%% Only function definitions are of interest here. State is not updated.
+form({function,Line,Name0,Arity0,Clauses0},St) ->
+ {Name,Arity,Clauses} = function(Name0, Arity0, Clauses0, St),
+ {{function,Line,Name,Arity,Clauses},St};
+%% Pass anything else through
+form(F,St) -> {F,St}.
+
+function(Name, Arity, Clauses0, St) ->
+ Clauses1 = clauses(Clauses0,St),
+ {Name,Arity,Clauses1}.
+
+clauses([C|Cs],St) ->
+ {clause,L,H,G,B} = clause(C,St),
+ T = {tuple,L,[{var,L,V} || V <- ['_'|St#pmod.parameters]]},
+ [{clause,L,H++[{match,L,T,{var,L,'THIS'}}],G,B}|clauses(Cs,St)];
+clauses([],_St) -> [].
+
+clause({clause,Line,H0,G0,B0},St) ->
+ H1 = head(H0,St),
+ G1 = guard(G0,St),
+ B1 = exprs(B0,St),
+ {clause,Line,H1,G1,B1}.
+
+head(Ps,St) -> patterns(Ps,St).
+
+patterns([P0|Ps],St) ->
+ P1 = pattern(P0,St),
+ [P1|patterns(Ps,St)];
+patterns([],_St) -> [].
+
+string_to_conses([], _Line, Tail) ->
+ Tail;
+string_to_conses([E|Rest], Line, Tail) ->
+ {cons, Line, {integer, Line, E}, string_to_conses(Rest, Line, Tail)}.
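+
+%% For instance, string_to_conses("ab", Line, Tail) builds
+%% {cons,Line,{integer,Line,$a},{cons,Line,{integer,Line,$b},Tail}},
+%% i.e. the abstract form of [$a,$b|Tail]; this is how '++' patterns
+%% with string prefixes are expanded below.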
+
+pattern({var,Line,V},_St) -> {var,Line,V};
+pattern({match,Line,L0,R0},St) ->
+ L1 = pattern(L0,St),
+ R1 = pattern(R0,St),
+ {match,Line,L1,R1};
+pattern({integer,Line,I},_St) -> {integer,Line,I};
+pattern({char,Line,C},_St) -> {char,Line,C};
+pattern({float,Line,F},_St) -> {float,Line,F};
+pattern({atom,Line,A},_St) -> {atom,Line,A};
+pattern({string,Line,S},_St) -> {string,Line,S};
+pattern({nil,Line},_St) -> {nil,Line};
+pattern({cons,Line,H0,T0},St) ->
+ H1 = pattern(H0,St),
+ T1 = pattern(T0,St),
+ {cons,Line,H1,T1};
+pattern({tuple,Line,Ps0},St) ->
+ Ps1 = pattern_list(Ps0,St),
+ {tuple,Line,Ps1};
+pattern({bin,Line,Fs},St) ->
+ Fs2 = pattern_grp(Fs,St),
+ {bin,Line,Fs2};
+pattern({op,_Line,'++',{nil,_},R},St) ->
+ pattern(R,St);
+pattern({op,_Line,'++',{cons,Li,{char,C2,I},T},R},St) ->
+ pattern({cons,Li,{char,C2,I},{op,Li,'++',T,R}},St);
+pattern({op,_Line,'++',{cons,Li,{integer,L2,I},T},R},St) ->
+ pattern({cons,Li,{integer,L2,I},{op,Li,'++',T,R}},St);
+pattern({op,_Line,'++',{string,Li,L},R},St) ->
+ pattern(string_to_conses(L, Li, R),St);
+pattern({op,Line,Op,A},_St) ->
+ {op,Line,Op,A};
+pattern({op,Line,Op,L,R},_St) ->
+ {op,Line,Op,L,R}.
+
+pattern_grp([{bin_element,L1,E1,S1,T1} | Fs],St) ->
+ S2 = case S1 of
+ default ->
+ default;
+ _ ->
+ expr(S1,St)
+ end,
+ T2 = case T1 of
+ default ->
+ default;
+ _ ->
+ bit_types(T1)
+ end,
+ [{bin_element,L1,expr(E1,St),S2,T2} | pattern_grp(Fs,St)];
+pattern_grp([],_St) ->
+ [].
+
+bit_types([]) ->
+ [];
+bit_types([Atom | Rest]) when atom(Atom) ->
+ [Atom | bit_types(Rest)];
+bit_types([{Atom, Integer} | Rest]) when atom(Atom), integer(Integer) ->
+ [{Atom, Integer} | bit_types(Rest)].
+
+pattern_list([P0|Ps],St) ->
+ P1 = pattern(P0,St),
+ [P1|pattern_list(Ps,St)];
+pattern_list([],_St) -> [].
+
+guard([G0|Gs],St) when list(G0) ->
+ [guard0(G0,St) | guard(Gs,St)];
+guard(L,St) ->
+ guard0(L,St).
+
+guard0([G0|Gs],St) ->
+ G1 = guard_test(G0,St),
+ [G1|guard0(Gs,St)];
+guard0([],_St) -> [].
+
+guard_test(Expr={call,Line,{atom,La,F},As0},St) ->
+ case erl_internal:type_test(F, length(As0)) of
+ true ->
+ As1 = gexpr_list(As0,St),
+ {call,Line,{atom,La,F},As1};
+ _ ->
+ gexpr(Expr,St)
+ end;
+guard_test(Any,St) ->
+ gexpr(Any,St).
+
+gexpr({var,L,V},_St) ->
+ {var,L,V};
+% %% alternative implementation of accessing module parameters
+% case index(V,St#pmod.parameters) of
+% N when N > 0 ->
+% {call,L,{remote,L,{atom,L,erlang},{atom,L,element}},
+% [{integer,L,N+1},{var,L,'THIS'}]};
+% _ ->
+% {var,L,V}
+% end;
+gexpr({integer,Line,I},_St) -> {integer,Line,I};
+gexpr({char,Line,C},_St) -> {char,Line,C};
+gexpr({float,Line,F},_St) -> {float,Line,F};
+gexpr({atom,Line,A},_St) -> {atom,Line,A};
+gexpr({string,Line,S},_St) -> {string,Line,S};
+gexpr({nil,Line},_St) -> {nil,Line};
+gexpr({cons,Line,H0,T0},St) ->
+ H1 = gexpr(H0,St),
+ T1 = gexpr(T0,St),
+ {cons,Line,H1,T1};
+gexpr({tuple,Line,Es0},St) ->
+ Es1 = gexpr_list(Es0,St),
+ {tuple,Line,Es1};
+gexpr({call,Line,{atom,La,F},As0},St) ->
+ case erl_internal:guard_bif(F, length(As0)) of
+ true -> As1 = gexpr_list(As0,St),
+ {call,Line,{atom,La,F},As1}
+ end;
+% Pre-expansion generated calls to erlang:is_record/3 must also be handled
+gexpr({call,Line,{remote,La,{atom,Lb,erlang},{atom,Lc,is_record}},As0},St)
+ when length(As0) == 3 ->
+ As1 = gexpr_list(As0,St),
+ {call,Line,{remote,La,{atom,Lb,erlang},{atom,Lc,is_record}},As1};
+% Guard bif's can be remote, but only in the module erlang...
+gexpr({call,Line,{remote,La,{atom,Lb,erlang},{atom,Lc,F}},As0},St) ->
+ case erl_internal:guard_bif(F, length(As0)) or
+ erl_internal:arith_op(F, length(As0)) or
+ erl_internal:comp_op(F, length(As0)) or
+ erl_internal:bool_op(F, length(As0)) of
+ true -> As1 = gexpr_list(As0,St),
+ {call,Line,{remote,La,{atom,Lb,erlang},{atom,Lc,F}},As1}
+ end;
+% Unfortunately, writing calls as {M,F}(...) is also allowed.
+gexpr({call,Line,{tuple,La,[{atom,Lb,erlang},{atom,Lc,F}]},As0},St) ->
+ case erl_internal:guard_bif(F, length(As0)) or
+ erl_internal:arith_op(F, length(As0)) or
+ erl_internal:comp_op(F, length(As0)) or
+ erl_internal:bool_op(F, length(As0)) of
+ true -> As1 = gexpr_list(As0,St),
+ {call,Line,{tuple,La,[{atom,Lb,erlang},{atom,Lc,F}]},As1}
+ end;
+gexpr({bin,Line,Fs},St) ->
+ Fs2 = pattern_grp(Fs,St),
+ {bin,Line,Fs2};
+gexpr({op,Line,Op,A0},St) ->
+ case erl_internal:arith_op(Op, 1) or
+ erl_internal:bool_op(Op, 1) of
+ true -> A1 = gexpr(A0,St),
+ {op,Line,Op,A1}
+ end;
+gexpr({op,Line,Op,L0,R0},St) ->
+ case erl_internal:arith_op(Op, 2) or
+ erl_internal:bool_op(Op, 2) or
+ erl_internal:comp_op(Op, 2) of
+ true ->
+ L1 = gexpr(L0,St),
+ R1 = gexpr(R0,St),
+ {op,Line,Op,L1,R1}
+ end.
+
+gexpr_list([E0|Es],St) ->
+ E1 = gexpr(E0,St),
+ [E1|gexpr_list(Es,St)];
+gexpr_list([],_St) -> [].
+
+exprs([E0|Es],St) ->
+ E1 = expr(E0,St),
+ [E1|exprs(Es,St)];
+exprs([],_St) -> [].
+
+expr({var,L,V},_St) ->
+ {var,L,V};
+% case index(V,St#pmod.parameters) of
+% N when N > 0 ->
+% {call,L,{remote,L,{atom,L,erlang},{atom,L,element}},
+% [{integer,L,N+1},{var,L,'THIS'}]};
+% _ ->
+% {var,L,V}
+% end;
+expr({integer,Line,I},_St) -> {integer,Line,I};
+expr({float,Line,F},_St) -> {float,Line,F};
+expr({atom,Line,A},_St) -> {atom,Line,A};
+expr({string,Line,S},_St) -> {string,Line,S};
+expr({char,Line,C},_St) -> {char,Line,C};
+expr({nil,Line},_St) -> {nil,Line};
+expr({cons,Line,H0,T0},St) ->
+ H1 = expr(H0,St),
+ T1 = expr(T0,St),
+ {cons,Line,H1,T1};
+expr({lc,Line,E0,Qs0},St) ->
+ Qs1 = lc_quals(Qs0,St),
+ E1 = expr(E0,St),
+ {lc,Line,E1,Qs1};
+expr({tuple,Line,Es0},St) ->
+ Es1 = expr_list(Es0,St),
+ {tuple,Line,Es1};
+expr({block,Line,Es0},St) ->
+ Es1 = exprs(Es0,St),
+ {block,Line,Es1};
+expr({'if',Line,Cs0},St) ->
+ Cs1 = icr_clauses(Cs0,St),
+ {'if',Line,Cs1};
+expr({'case',Line,E0,Cs0},St) ->
+ E1 = expr(E0,St),
+ Cs1 = icr_clauses(Cs0,St),
+ {'case',Line,E1,Cs1};
+expr({'receive',Line,Cs0},St) ->
+ Cs1 = icr_clauses(Cs0,St),
+ {'receive',Line,Cs1};
+expr({'receive',Line,Cs0,To0,ToEs0},St) ->
+ To1 = expr(To0,St),
+ ToEs1 = exprs(ToEs0,St),
+ Cs1 = icr_clauses(Cs0,St),
+ {'receive',Line,Cs1,To1,ToEs1};
+expr({'try',Line,Es0,Scs0,Ccs0,As0},St) ->
+ Es1 = exprs(Es0,St),
+ Scs1 = icr_clauses(Scs0,St),
+ Ccs1 = icr_clauses(Ccs0,St),
+ As1 = exprs(As0,St),
+ {'try',Line,Es1,Scs1,Ccs1,As1};
+expr({'fun',Line,Body,Info},St) ->
+ case Body of
+ {clauses,Cs0} ->
+ Cs1 = fun_clauses(Cs0,St),
+ {'fun',Line,{clauses,Cs1},Info};
+ {function,F,A} ->
+ {F1,A1} = update_function_name({F,A},St),
+ if A1 == A ->
+ {'fun',Line,{function,F,A},Info};
+ true ->
+ %% Must rewrite local fun-name to a fun that does a
+ %% call with the extra THIS parameter.
+ As = make_vars(A, Line),
+ As1 = As ++ [{var,Line,'THIS'}],
+ Call = {call,Line,{atom,Line,F1},As1},
+ Cs = [{clause,Line,As,[],[Call]}],
+ {'fun',Line,{clauses,Cs},Info}
+ end;
+ {function,M,F,A} -> %This is an error in lint!
+ {'fun',Line,{function,M,F,A},Info}
+ end;
+expr({call,Lc,{atom,_,new}=Name,As0},#pmod{parameters=Ps}=St)
+ when length(As0) =:= length(Ps) ->
+ %% The new() function does not take a 'THIS' argument (it's static).
+ As1 = expr_list(As0,St),
+ {call,Lc,Name,As1};
+expr({call,Lc,{atom,_,module_info}=Name,As0},St)
+ when length(As0) == 0; length(As0) == 1 ->
+ %% The module_info/0 and module_info/1 functions are also static.
+ As1 = expr_list(As0,St),
+ {call,Lc,Name,As1};
+expr({call,Lc,{atom,Lf,F},As0},St) ->
+ %% Local function call - needs THIS parameter.
+ As1 = expr_list(As0,St),
+ {call,Lc,{atom,Lf,F},As1 ++ [{var,0,'THIS'}]};
+expr({call,Line,F0,As0},St) ->
+ %% Other function call
+ F1 = expr(F0,St),
+ As1 = expr_list(As0,St),
+ {call,Line,F1,As1};
+expr({'catch',Line,E0},St) ->
+ E1 = expr(E0,St),
+ {'catch',Line,E1};
+expr({match,Line,P0,E0},St) ->
+ E1 = expr(E0,St),
+ P1 = pattern(P0,St),
+ {match,Line,P1,E1};
+expr({bin,Line,Fs},St) ->
+ Fs2 = pattern_grp(Fs,St),
+ {bin,Line,Fs2};
+expr({op,Line,Op,A0},St) ->
+ A1 = expr(A0,St),
+ {op,Line,Op,A1};
+expr({op,Line,Op,L0,R0},St) ->
+ L1 = expr(L0,St),
+ R1 = expr(R0,St),
+ {op,Line,Op,L1,R1};
+%% The following are not allowed to occur anywhere!
+expr({remote,Line,M0,F0},St) ->
+ M1 = expr(M0,St),
+ F1 = expr(F0,St),
+ {remote,Line,M1,F1}.
+
+expr_list([E0|Es],St) ->
+ E1 = expr(E0,St),
+ [E1|expr_list(Es,St)];
+expr_list([],_St) -> [].
+
+icr_clauses([C0|Cs],St) ->
+ C1 = clause(C0,St),
+ [C1|icr_clauses(Cs,St)];
+icr_clauses([],_St) -> [].
+
+lc_quals([{generate,Line,P0,E0}|Qs],St) ->
+ E1 = expr(E0,St),
+ P1 = pattern(P0,St),
+ [{generate,Line,P1,E1}|lc_quals(Qs,St)];
+lc_quals([E0|Qs],St) ->
+ E1 = expr(E0,St),
+ [E1|lc_quals(Qs,St)];
+lc_quals([],_St) -> [].
+
+fun_clauses([C0|Cs],St) ->
+ C1 = clause(C0,St),
+ [C1|fun_clauses(Cs,St)];
+fun_clauses([],_St) -> [].
+
+% %% Return index from 1 upwards, or 0 if not in the list.
+%
+% index(X,Ys) -> index(X,Ys,1).
+%
+% index(X,[X|Ys],A) -> A;
+% index(X,[Y|Ys],A) -> index(X,Ys,A+1);
+% index(X,[],A) -> 0.
+
+make_vars(N, L) ->
+ make_vars(1, N, L).
+
+make_vars(N, M, L) when N =< M ->
+ V = list_to_atom("X"++integer_to_list(N)),
+ [{var,L,V} | make_vars(N + 1, M, L)];
+make_vars(_, _, _) ->
+ [].
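+
+%% For example, make_vars(2, L) yields [{var,L,'X1'},{var,L,'X2'}],
+%% i.e. the fresh argument variables used above when a local fun F/A
+%% is rewritten to pass along the extra THIS parameter.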
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_attributes.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_attributes.erl
new file mode 100644
index 0000000000..6e68611c66
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_attributes.erl
@@ -0,0 +1,212 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: sys_pre_attributes.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Transform Erlang compiler attributes
+
+-module(sys_pre_attributes).
+
+-export([parse_transform/2]).
+
+-define(OPTION_TAG, attributes).
+
+-record(state, {forms,
+ pre_ops = [],
+ post_ops = [],
+ options}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Inserts, deletes and replaces Erlang compiler attributes.
+%%
+%% Valid options are:
+%%
+%% {attribute, insert, AttrName, NewAttrVal}
+%% {attribute, replace, AttrName, NewAttrVal} % replace first occurrence
+%% {attribute, delete, AttrName}
+%%
+%% The transformation is performed in two passes:
+%%
+%% pre_transform
+%% -------------
+%% Searches for attributes in the list of Forms in order to
+%% delete or replace them. 'delete' will delete all occurrences
+%% of attributes with the given name. 'replace' will replace the
+%%    first occurrence of the attribute. This pass will only be
+%% performed if there are replace or delete operations stated
+%% as options.
+%%
+%% post_transform
+%% -------------
+%% Looks up the module attribute and inserts the new attributes
+%% directly after. This pass will only be performed if there are
+%%    any attributes left to be inserted after pre_transform. The
+%%    leftovers will be the replace operations that were not performed
+%%    because the pre_transform pass did not find the attribute, plus
+%%    all insert operations.
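+%%
+%% A minimal usage sketch (the attribute name 'vsn' is only an
+%% example; the transform is enabled as an ordinary compiler option):
+%%
+%%   compile:file(Mod, [{parse_transform, sys_pre_attributes},
+%%                      {attribute, insert, vsn, "1.0"}])
+%%
+%% would be expected to insert -vsn("1.0"). directly after the module
+%% attribute of Mod.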
+
+parse_transform(Forms, Options) ->
+ S = #state{forms = Forms, options = Options},
+ S2 = init_transform(S),
+ report_verbose("Pre options: ~p~n", [S2#state.pre_ops], S2),
+ report_verbose("Post options: ~p~n", [S2#state.post_ops], S2),
+ S3 = pre_transform(S2),
+ S4 = post_transform(S3),
+ S4#state.forms.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Computes the lists of pre_ops and post_ops that are
+%% used in the real transformation.
+init_transform(S) ->
+ case S#state.options of
+ Options when list(Options) ->
+ init_transform(Options, S);
+ Option ->
+ init_transform([Option], S)
+ end.
+
+init_transform([{attribute, insert, Name, Val} | Tail], S) ->
+ Op = {insert, Name, Val},
+ PostOps = [Op | S#state.post_ops],
+ init_transform(Tail, S#state{post_ops = PostOps});
+init_transform([{attribute, replace, Name, Val} | Tail], S) ->
+ Op = {replace, Name, Val},
+ PreOps = [Op | S#state.pre_ops],
+ PostOps = [Op | S#state.post_ops],
+ init_transform(Tail, S#state{pre_ops = PreOps, post_ops = PostOps});
+init_transform([{attribute, delete, Name} | Tail], S) ->
+ Op = {delete, Name},
+ PreOps = [Op | S#state.pre_ops],
+ init_transform(Tail, S#state{pre_ops = PreOps});
+init_transform([], S) ->
+ S;
+init_transform([_ | T], S) ->
+ init_transform(T, S);
+init_transform(BadOpt, S) ->
+ report_error("Illegal option (ignored): ~p~n", [BadOpt], S),
+ S.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Handle delete and perhaps replace
+
+pre_transform(S) when S#state.pre_ops == [] ->
+ S;
+pre_transform(S) ->
+ pre_transform(S#state.forms, [], S).
+
+pre_transform([H | T], Acc, S) ->
+ case H of
+ {attribute, Line, Name, Val} ->
+ case lists:keysearch(Name, 2, S#state.pre_ops) of
+ false ->
+ pre_transform(T, [H | Acc], S);
+
+ {value, {replace, Name, NewVal}} ->
+ report_warning("Replace attribute ~p: ~p -> ~p~n",
+ [Name, Val, NewVal],
+ S),
+ New = {attribute, Line, Name, NewVal},
+ Pre = lists:keydelete(Name, 2, S#state.pre_ops),
+ Post = lists:keydelete(Name, 2, S#state.post_ops),
+ S2 = S#state{pre_ops = Pre, post_ops = Post},
+ if
+ Pre == [] ->
+ %% No need to search the rest of the Forms
+ Forms = lists:reverse(Acc, [New | T]),
+ S2#state{forms = Forms};
+ true ->
+ pre_transform(T, [New | Acc], S2)
+ end;
+
+ {value, {delete, Name}} ->
+ report_warning("Delete attribute ~p: ~p~n",
+ [Name, Val],
+ S),
+ pre_transform(T, Acc, S)
+ end;
+ _Any ->
+ pre_transform(T, [H | Acc], S)
+ end;
+pre_transform([], Acc, S) ->
+ S#state{forms = lists:reverse(Acc)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Handle insert and perhaps replace
+
+post_transform(S) when S#state.post_ops == [] ->
+ S;
+post_transform(S) ->
+ post_transform(S#state.forms, [], S).
+
+post_transform([H | T], Acc, S) ->
+ case H of
+ {attribute, Line, module, Val} ->
+ Acc2 = lists:reverse([{attribute, Line, module, Val} | Acc]),
+ Forms = Acc2 ++ attrs(S#state.post_ops, Line, S) ++ T,
+ S#state{forms = Forms, post_ops = []};
+ _Any ->
+ post_transform(T, [H | Acc], S)
+ end;
+post_transform([], Acc, S) ->
+ S#state{forms = lists:reverse(Acc)}.
+
+attrs([{replace, Name, NewVal} | T], Line, S) ->
+ report_verbose("Insert attribute ~p: ~p~n", [Name, NewVal], S),
+ [{attribute, Line, Name, NewVal} | attrs(T, Line, S)];
+attrs([{insert, Name, NewVal} | T], Line, S) ->
+ report_verbose("Insert attribute ~p: ~p~n", [Name, NewVal], S),
+ [{attribute, Line, Name, NewVal} | attrs(T, Line, S)];
+attrs([], _, _) ->
+ [].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Report functions.
+%%
+%% Errors messages are controlled with the 'report_errors' compiler option
+%% Warning messages are controlled with the 'report_warnings' compiler option
+%% Verbose messages are controlled with the 'verbose' compiler option
+
+report_error(Format, Args, S) ->
+ case is_error(S) of
+ true ->
+ io:format("~p: * ERROR * " ++ Format, [?MODULE | Args]);
+ false ->
+ ok
+ end.
+
+report_warning(Format, Args, S) ->
+ case is_warning(S) of
+ true ->
+ io:format("~p: * WARNING * " ++ Format, [?MODULE | Args]);
+ false ->
+ ok
+ end.
+
+report_verbose(Format, Args, S) ->
+ case is_verbose(S) of
+ true ->
+ io:format("~p: " ++ Format, [?MODULE | Args]);
+ false ->
+ ok
+ end.
+
+is_error(S) ->
+ lists:member(report_errors, S#state.options) or is_verbose(S).
+
+is_warning(S) ->
+ lists:member(report_warnings, S#state.options) or is_verbose(S).
+
+is_verbose(S) ->
+ lists:member(verbose, S#state.options).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_expand.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_expand.erl
new file mode 100644
index 0000000000..5e7c1c8bbd
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/sys_pre_expand.erl
@@ -0,0 +1,1026 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: sys_pre_expand.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Expand some source Erlang constructions. This is part of the
+%% pre-processing phase.
+
+%% N.B. Although structs (tagged tuples) are not yet allowed in the
+%% language there is code included in pattern/2 and expr/3 (commented out)
+%% that handles them by transforming them to tuples.
+
+-module(sys_pre_expand).
+
+%% Main entry point.
+-export([module/2]).
+
+-import(ordsets, [from_list/1,add_element/2,
+ union/1,union/2,intersection/1,intersection/2,subtract/2]).
+-import(lists, [member/2,map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]).
+
+-include("../my_include/erl_bits.hrl").
+
+-record(expand, {module=[], %Module name
+ parameters=undefined, %Module parameters
+ package="", %Module package
+ exports=[], %Exports
+ imports=[], %Imports
+ mod_imports, %Module Imports
+ compile=[], %Compile flags
+ records=dict:new(), %Record definitions
+ attributes=[], %Attributes
+ defined=[], %Defined functions
+ vcount=0, %Variable counter
+ func=[], %Current function
+ arity=[], %Arity for current function
+ fcount=0, %Local fun count
+ fun_index=0, %Global index for funs
+ bitdefault,
+ bittypes
+ }).
+
+%% module(Forms, CompileOptions)
+%% {ModuleName,Exports,TransformedForms}
+%% Expand the forms in one module. N.B.: the lists of predefined
+%% exports and imports are really ordsets!
+
+module(Fs, Opts) ->
+ %% Set pre-defined exported functions.
+ PreExp = [{module_info,0},{module_info,1}],
+
+ %% Set pre-defined module imports.
+ PreModImp = [{erlang,erlang},{packages,packages}],
+
+ %% Build initial expand record.
+ St0 = #expand{exports=PreExp,
+ mod_imports=dict:from_list(PreModImp),
+ compile=Opts,
+ defined=PreExp,
+ bitdefault = erl_bits:system_bitdefault(),
+ bittypes = erl_bits:system_bittypes()
+ },
+ %% Expand the functions.
+ {Tfs,St1} = forms(Fs, foldl(fun define_function/2, St0, Fs)),
+ {Efs,St2} = expand_pmod(Tfs, St1),
+ %% Get the correct list of exported functions.
+ Exports = case member(export_all, St2#expand.compile) of
+ true -> St2#expand.defined;
+ false -> St2#expand.exports
+ end,
+ %% Generate all functions from stored info.
+ {Ats,St3} = module_attrs(St2#expand{exports = Exports}),
+ {Mfs,St4} = module_predef_funcs(St3),
+ {St4#expand.module, St4#expand.exports, Ats ++ Efs ++ Mfs,
+ St4#expand.compile}.
+
+expand_pmod(Fs0, St) ->
+ case St#expand.parameters of
+ undefined ->
+ {Fs0,St};
+ Ps ->
+ {Fs1,Xs,Ds} = sys_expand_pmod:forms(Fs0, Ps,
+ St#expand.exports,
+ St#expand.defined),
+ A = length(Ps),
+ Vs = [{var,0,V} || V <- Ps],
+ N = {atom,0,St#expand.module},
+ B = [{tuple,0,[N|Vs]}],
+ F = {function,0,new,A,[{clause,0,Vs,[],B}]},
+ As = St#expand.attributes,
+ {[F|Fs1],St#expand{exports=add_element({new,A}, Xs),
+ defined=add_element({new,A}, Ds),
+ attributes = [{abstract, true} | As]}}
+ end.
+
+%% -type define_function(Form, State) -> State.
+%% Add function to defined if form a function.
+
+define_function({function,_,N,A,_Cs}, St) ->
+ St#expand{defined=add_element({N,A}, St#expand.defined)};
+define_function(_, St) -> St.
+
+module_attrs(St) ->
+ {[{attribute,0,Name,Val} || {Name,Val} <- St#expand.attributes],St}.
+
+module_predef_funcs(St) ->
+ PreDef = [{module_info,0},{module_info,1}],
+ PreExp = PreDef,
+ {[{function,0,module_info,0,
+ [{clause,0,[],[],
+ [{call,0,{remote,0,{atom,0,erlang},{atom,0,get_module_info}},
+ [{atom,0,St#expand.module}]}]}]},
+ {function,0,module_info,1,
+ [{clause,0,[{var,0,'X'}],[],
+ [{call,0,{remote,0,{atom,0,erlang},{atom,0,get_module_info}},
+ [{atom,0,St#expand.module},{var,0,'X'}]}]}]}],
+ St#expand{defined=union(from_list(PreDef), St#expand.defined),
+ exports=union(from_list(PreExp), St#expand.exports)}}.
+
+%% forms(Forms, State) ->
+%% {TransformedForms,State'}
+%% Process the forms. Attributes are lost and just affect the state.
+%% Ignore uninteresting forms like eof and type.
+
+forms([{attribute,_,Name,Val}|Fs0], St0) ->
+ St1 = attribute(Name, Val, St0),
+ forms(Fs0, St1);
+forms([{function,L,N,A,Cs}|Fs0], St0) ->
+ {Ff,St1} = function(L, N, A, Cs, St0),
+ {Fs,St2} = forms(Fs0, St1),
+ {[Ff|Fs],St2};
+forms([_|Fs], St) -> forms(Fs, St);
+forms([], St) -> {[],St}.
+
+%% -type attribute(Attribute, Value, State) ->
+%% State.
+%% Process an attribute, this just affects the state.
+
+attribute(module, {Module, As}, St) ->
+ M = package_to_string(Module),
+ St#expand{module=list_to_atom(M),
+ package = packages:strip_last(M),
+ parameters=As};
+attribute(module, Module, St) ->
+ M = package_to_string(Module),
+ St#expand{module=list_to_atom(M),
+ package = packages:strip_last(M)};
+attribute(export, Es, St) ->
+ St#expand{exports=union(from_list(Es), St#expand.exports)};
+attribute(import, Is, St) ->
+ import(Is, St);
+attribute(compile, C, St) when list(C) ->
+ St#expand{compile=St#expand.compile ++ C};
+attribute(compile, C, St) ->
+ St#expand{compile=St#expand.compile ++ [C]};
+attribute(record, {Name,Defs}, St) ->
+ St#expand{records=dict:store(Name, normalise_fields(Defs),
+ St#expand.records)};
+attribute(file, _File, St) -> St; %This is ignored
+attribute(Name, Val, St) when list(Val) ->
+ St#expand{attributes=St#expand.attributes ++ [{Name,Val}]};
+attribute(Name, Val, St) ->
+ St#expand{attributes=St#expand.attributes ++ [{Name,[Val]}]}.
+
+function(L, N, A, Cs0, St0) ->
+ {Cs,St} = clauses(Cs0, St0#expand{func=N,arity=A,fcount=0}),
+ {{function,L,N,A,Cs},St}.
+
+%% -type clauses([Clause], State) ->
+%% {[TransformedClause],State}.
+%% Expand function clauses.
+
+clauses([{clause,Line,H0,G0,B0}|Cs0], St0) ->
+ {H,Hvs,_Hus,St1} = head(H0, St0),
+ {G,Gvs,_Gus,St2} = guard(G0, Hvs, St1),
+ {B,_Bvs,_Bus,St3} = exprs(B0, union(Hvs, Gvs), St2),
+ {Cs,St4} = clauses(Cs0, St3),
+ {[{clause,Line,H,G,B}|Cs],St4};
+clauses([], St) -> {[],St}.
+
+%% head(HeadPatterns, State) ->
+%% {TransformedPatterns,Variables,UsedVariables,State'}
+
+head(As, St) -> pattern_list(As, St).
+
+%% pattern(Pattern, State) ->
+%% {TransformedPattern,Variables,UsedVariables,State'}
+%%  BITS: added used variables for bit patterns with variable length
+%%
+
+pattern({var,_,'_'}=Var, St) -> %Ignore anonymous variable.
+ {Var,[],[],St};
+pattern({var,_,V}=Var, St) ->
+ {Var,[V],[],St};
+pattern({char,_,_}=Char, St) ->
+ {Char,[],[],St};
+pattern({integer,_,_}=Int, St) ->
+ {Int,[],[],St};
+pattern({float,_,_}=Float, St) ->
+ {Float,[],[],St};
+pattern({atom,_,_}=Atom, St) ->
+ {Atom,[],[],St};
+pattern({string,_,_}=String, St) ->
+ {String,[],[],St};
+pattern({nil,_}=Nil, St) ->
+ {Nil,[],[],St};
+pattern({cons,Line,H,T}, St0) ->
+ {TH,THvs,Hus,St1} = pattern(H, St0),
+ {TT,TTvs,Tus,St2} = pattern(T, St1),
+ {{cons,Line,TH,TT},union(THvs, TTvs),union(Hus,Tus),St2};
+pattern({tuple,Line,Ps}, St0) ->
+ {TPs,TPsvs,Tus,St1} = pattern_list(Ps, St0),
+ {{tuple,Line,TPs},TPsvs,Tus,St1};
+%%pattern({struct,Line,Tag,Ps}, St0) ->
+%% {TPs,TPsvs,St1} = pattern_list(Ps, St0),
+%% {{tuple,Line,[{atom,Line,Tag}|TPs]},TPsvs,St1};
+pattern({record_field,_,_,_}=M, St) ->
+ {expand_package(M, St), [], [], St}; % must be a package name
+pattern({record_index,Line,Name,Field}, St) ->
+ {index_expr(Line, Field, Name, record_fields(Name, St)),[],[],St};
+pattern({record,Line,Name,Pfs}, St0) ->
+ Fs = record_fields(Name, St0),
+ {TMs,TMsvs,Us,St1} = pattern_list(pattern_fields(Fs, Pfs), St0),
+ {{tuple,Line,[{atom,Line,Name}|TMs]},TMsvs,Us,St1};
+pattern({bin,Line,Es0}, St0) ->
+ {Es1,Esvs,Esus,St1} = pattern_bin(Es0, St0),
+ {{bin,Line,Es1},Esvs,Esus,St1};
+pattern({op,_,'++',{nil,_},R}, St) ->
+ pattern(R, St);
+pattern({op,_,'++',{cons,Li,H,T},R}, St) ->
+ pattern({cons,Li,H,{op,Li,'++',T,R}}, St);
+pattern({op,_,'++',{string,Li,L},R}, St) ->
+ pattern(string_to_conses(Li, L, R), St);
+pattern({match,Line,Pat1, Pat2}, St0) ->
+ {TH,Hvt,Hus,St1} = pattern(Pat2, St0),
+ {TT,Tvt,Tus,St2} = pattern(Pat1, St1),
+ {{match,Line,TT,TH}, union(Hvt,Tvt), union(Hus,Tus), St2};
+%% Compile-time pattern expressions, including unary operators.
+pattern({op,Line,Op,A}, St) ->
+ { erl_eval:partial_eval({op,Line,Op,A}), [], [], St};
+pattern({op,Line,Op,L,R}, St) ->
+ { erl_eval:partial_eval({op,Line,Op,L,R}), [], [], St}.
+
+pattern_list([P0|Ps0], St0) ->
+ {P,Pvs,Pus,St1} = pattern(P0, St0),
+ {Ps,Psvs,Psus,St2} = pattern_list(Ps0, St1),
+ {[P|Ps],union(Pvs, Psvs),union(Pus, Psus),St2};
+pattern_list([], St) -> {[],[],[],St}.
+
+%% guard(Guard, VisibleVariables, State) ->
+%% {TransformedGuard,NewVariables,UsedVariables,State'}
+%% Transform a list of guard tests. We KNOW that this has been checked
+%% and what the guards test are. Use expr for transforming the guard
+%% expressions.
+
+guard([G0|Gs0], Vs, St0) ->
+ {G,Hvs,Hus,St1} = guard_tests(G0, Vs, St0),
+ {Gs,Tvs,Tus,St2} = guard(Gs0, Vs, St1),
+ {[G|Gs],union(Hvs, Tvs),union(Hus, Tus),St2};
+guard([], _, St) -> {[],[],[],St}.
+
+guard_tests([Gt0|Gts0], Vs, St0) ->
+ {Gt1,Gvs,Gus,St1} = guard_test(Gt0, Vs, St0),
+ {Gts1,Gsvs,Gsus,St2} = guard_tests(Gts0, union(Gvs, Vs), St1),
+ {[Gt1|Gts1],union(Gvs, Gsvs),union(Gus, Gsus),St2};
+guard_tests([], _, St) -> {[],[],[],St}.
+
+guard_test({call,Line,{atom,_,record},[A,{atom,_,Name}]}, Vs, St) ->
+ record_test_in_guard(Line, A, Name, Vs, St);
+guard_test({call,Line,{atom,Lt,Tname},As}, Vs, St) ->
+ %% XXX This is ugly. We can remove this workaround if/when
+ %% we'll allow 'andalso' in guards. For now, we must have
+ %% different code in guards and in bodies.
+ Test = {remote,Lt,
+ {atom,Lt,erlang},
+ {atom,Lt,normalise_test(Tname, length(As))}},
+ put(sys_pre_expand_in_guard, yes),
+ R = expr({call,Line,Test,As}, Vs, St),
+ erase(sys_pre_expand_in_guard),
+ R;
+guard_test(Test, Vs, St) ->
+ %% XXX See the previous clause.
+ put(sys_pre_expand_in_guard, yes),
+ R = expr(Test, Vs, St),
+ erase(sys_pre_expand_in_guard),
+ R.
+
+%% record_test(Line, Term, Name, Vs, St) -> TransformedExpr
+%% Generate code for is_record/1.
+
+record_test(Line, Term, Name, Vs, St) ->
+ case get(sys_pre_expand_in_guard) of
+ undefined ->
+ record_test_in_body(Line, Term, Name, Vs, St);
+ yes ->
+ record_test_in_guard(Line, Term, Name, Vs, St)
+ end.
+
+record_test_in_guard(Line, Term, Name, Vs, St) ->
+ %% Notes: (1) To keep is_record/3 properly atomic (e.g. when inverted
+ %% using 'not'), we cannot convert it to an instruction
+ %% sequence here. It must remain a single call.
+ %% (2) Later passes assume that the last argument (the size)
+ %% is a literal.
+ %% (3) We don't want calls to erlang:is_record/3 (in the source code)
+    %%             confused with the internal instruction. (Reason: (2) above +
+ %% code bloat.)
+ %% (4) Xref may be run on the abstract code, so the name in the
+ %% abstract code must be erlang:is_record/3.
+    %%         (5) To achieve both (3) and (4) at the same time, set the name
+ %% here to erlang:is_record/3, but mark it as compiler-generated.
+ %% The v3_core pass will change the name to erlang:internal_is_record/3.
+ Fs = record_fields(Name, St),
+ expr({call,-Line,{remote,-Line,{atom,-Line,erlang},{atom,-Line,is_record}},
+ [Term,{atom,Line,Name},{integer,Line,length(Fs)+1}]},
+ Vs, St).
+
+record_test_in_body(Line, Expr, Name, Vs, St0) ->
+ %% As Expr may have side effects, we must evaluate it
+ %% first and bind the value to a new variable.
+    %% We must also properly handle the case where Expr
+    %% does not evaluate to a tuple.
+ Fs = record_fields(Name, St0),
+ {Var,St} = new_var(Line, St0),
+
+ expr({block,Line,
+ [{match,Line,Var,Expr},
+ {op,Line,
+ 'andalso',
+ {call,Line,{atom,Line,is_tuple},[Var]},
+ {op,Line,'andalso',
+ {op,Line,'=:=',
+ {call,Line,{atom,Line,size},[Var]},
+ {integer,Line,length(Fs)+1}},
+ {op,Line,'=:=',
+ {call,Line,{atom,Line,element},[{integer,Line,1},Var]},
+ {atom,Line,Name}}}}]}, Vs, St).
+
+normalise_test(atom, 1) -> is_atom;
+normalise_test(binary, 1) -> is_binary;
+normalise_test(constant, 1) -> is_constant;
+normalise_test(float, 1) -> is_float;
+normalise_test(function, 1) -> is_function;
+normalise_test(integer, 1) -> is_integer;
+normalise_test(list, 1) -> is_list;
+normalise_test(number, 1) -> is_number;
+normalise_test(pid, 1) -> is_pid;
+normalise_test(port, 1) -> is_port;
+normalise_test(reference, 1) -> is_reference;
+normalise_test(tuple, 1) -> is_tuple;
+normalise_test(Name, _) -> Name.
+
+%% exprs(Expressions, VisibleVariables, State) ->
+%% {TransformedExprs,NewVariables,UsedVariables,State'}
+
+exprs([E0|Es0], Vs, St0) ->
+ {E,Evs,Eus,St1} = expr(E0, Vs, St0),
+ {Es,Esvs,Esus,St2} = exprs(Es0, union(Evs, Vs), St1),
+ {[E|Es],union(Evs, Esvs),union(Eus, Esus),St2};
+exprs([], _, St) -> {[],[],[],St}.
+
+%% expr(Expression, VisibleVariables, State) ->
+%% {TransformedExpression,NewVariables,UsedVariables,State'}
+
+expr({var,_,V}=Var, _Vs, St) ->
+ {Var,[],[V],St};
+expr({char,_,_}=Char, _Vs, St) ->
+ {Char,[],[],St};
+expr({integer,_,_}=Int, _Vs, St) ->
+ {Int,[],[],St};
+expr({float,_,_}=Float, _Vs, St) ->
+ {Float,[],[],St};
+expr({atom,_,_}=Atom, _Vs, St) ->
+ {Atom,[],[],St};
+expr({string,_,_}=String, _Vs, St) ->
+ {String,[],[],St};
+expr({nil,_}=Nil, _Vs, St) ->
+ {Nil,[],[],St};
+expr({cons,Line,H0,T0}, Vs, St0) ->
+ {H,Hvs,Hus,St1} = expr(H0, Vs, St0),
+ {T,Tvs,Tus,St2} = expr(T0, Vs, St1),
+ {{cons,Line,H,T},union(Hvs, Tvs),union(Hus, Tus),St2};
+expr({lc,Line,E0,Qs0}, Vs, St0) ->
+ {E1,Qs1,_,Lvs,Lus,St1} = lc_tq(Line, E0, Qs0, {nil,Line}, Vs, St0),
+ {{lc,Line,E1,Qs1},Lvs,Lus,St1};
+expr({tuple,Line,Es0}, Vs, St0) ->
+ {Es1,Esvs,Esus,St1} = expr_list(Es0, Vs, St0),
+ {{tuple,Line,Es1},Esvs,Esus,St1};
+%%expr({struct,Line,Tag,Es0}, Vs, St0) ->
+%% {Es1,Esvs,Esus,St1} = expr_list(Es0, Vs, St0),
+%% {{tuple,Line,[{atom,Line,Tag}|Es1]},Esvs,Esus,St1};
+expr({record_field,_,_,_}=M, _Vs, St) ->
+ {expand_package(M, St), [], [], St}; % must be a package name
+expr({record_index,Line,Name,F}, Vs, St) ->
+ I = index_expr(Line, F, Name, record_fields(Name, St)),
+ expr(I, Vs, St);
+expr({record,Line,Name,Is}, Vs, St) ->
+ expr({tuple,Line,[{atom,Line,Name}|
+ record_inits(record_fields(Name, St), Is)]},
+ Vs, St);
+expr({record_field,Line,R,Name,F}, Vs, St) ->
+ I = index_expr(Line, F, Name, record_fields(Name, St)),
+ expr({call,Line,{atom,Line,element},[I,R]}, Vs, St);
+expr({record,_,R,Name,Us}, Vs, St0) ->
+ {Ue,St1} = record_update(R, Name, record_fields(Name, St0), Us, St0),
+ expr(Ue, Vs, St1);
+expr({bin,Line,Es0}, Vs, St0) ->
+ {Es1,Esvs,Esus,St1} = expr_bin(Es0, Vs, St0),
+ {{bin,Line,Es1},Esvs,Esus,St1};
+expr({block,Line,Es0}, Vs, St0) ->
+ {Es,Esvs,Esus,St1} = exprs(Es0, Vs, St0),
+ {{block,Line,Es},Esvs,Esus,St1};
+expr({'if',Line,Cs0}, Vs, St0) ->
+ {Cs,Csvss,Csuss,St1} = icr_clauses(Cs0, Vs, St0),
+ All = new_in_all(Vs, Csvss),
+ {{'if',Line,Cs},All,union(Csuss),St1};
+expr({'case',Line,E0,Cs0}, Vs, St0) ->
+ {E,Evs,Eus,St1} = expr(E0, Vs, St0),
+ {Cs,Csvss,Csuss,St2} = icr_clauses(Cs0, union(Evs, Vs), St1),
+ All = new_in_all(Vs, Csvss),
+ {{'case',Line,E,Cs},union(Evs, All),union([Eus|Csuss]),St2};
+expr({'cond',Line,Cs}, Vs, St0) ->
+ {V,St1} = new_var(Line,St0),
+ expr(cond_clauses(Cs,V), Vs, St1);
+expr({'receive',Line,Cs0}, Vs, St0) ->
+ {Cs,Csvss,Csuss,St1} = icr_clauses(Cs0, Vs, St0),
+ All = new_in_all(Vs, Csvss),
+ {{'receive',Line,Cs},All,union(Csuss),St1};
+expr({'receive',Line,Cs0,To0,ToEs0}, Vs, St0) ->
+ {To,Tovs,Tous,St1} = expr(To0, Vs, St0),
+ {ToEs,ToEsvs,_ToEsus,St2} = exprs(ToEs0, Vs, St1),
+ {Cs,Csvss,Csuss,St3} = icr_clauses(Cs0, Vs, St2),
+ All = new_in_all(Vs, [ToEsvs|Csvss]),
+ {{'receive',Line,Cs,To,ToEs},union(Tovs, All),union([Tous|Csuss]),St3};
+expr({'fun',Line,Body}, Vs, St) ->
+ fun_tq(Line, Body, Vs, St);
+%%% expr({call,_,{atom,La,this_module},[]}, _Vs, St) ->
+%%% {{atom,La,St#expand.module}, [], [], St};
+%%% expr({call,_,{atom,La,this_package},[]}, _Vs, St) ->
+%%% {{atom,La,list_to_atom(St#expand.package)}, [], [], St};
+%%% expr({call,_,{atom,La,this_package},[{atom,_,Name}]}, _Vs, St) ->
+%%% M = packages:concat(St#expand.package,Name),
+%%% {{atom,La,list_to_atom(M)}, [], [], St};
+%%% expr({call,Line,{atom,La,this_package},[A]}, Vs, St) ->
+%%% M = {call,Line,{remote,La,{atom,La,packages},{atom,La,concat}},
+%%% [{string,La,St#expand.package}, A]},
+%%% expr({call,Line,{atom,Line,list_to_atom},[M]}, Vs, St);
+expr({call,Line,{atom,_,is_record},[A,{atom,_,Name}]}, Vs, St) ->
+ record_test(Line, A, Name, Vs, St);
+expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
+ [A,{atom,_,Name}]}, Vs, St) ->
+ record_test(Line, A, Name, Vs, St);
+expr({call,Line,{atom,La,N},As0}, Vs, St0) ->
+ {As,Asvs,Asus,St1} = expr_list(As0, Vs, St0),
+ Ar = length(As),
+ case erl_internal:bif(N, Ar) of
+ true ->
+ {{call,Line,{remote,La,{atom,La,erlang},{atom,La,N}},As},
+ Asvs,Asus,St1};
+ false ->
+ case imported(N, Ar, St1) of
+ {yes,Mod} ->
+ {{call,Line,{remote,La,{atom,La,Mod},{atom,La,N}},As},
+ Asvs,Asus,St1};
+ no ->
+ case {N,Ar} of
+ {record_info,2} ->
+ record_info_call(Line, As, St1);
+ _ ->
+ {{call,Line,{atom,La,N},As},Asvs,Asus,St1}
+ end
+ end
+ end;
+expr({call,Line,{record_field,_,_,_}=M,As0}, Vs, St0) ->
+ expr({call,Line,expand_package(M, St0),As0}, Vs, St0);
+expr({call,Line,{remote,Lr,M,F},As0}, Vs, St0) ->
+ M1 = expand_package(M, St0),
+ {[M2,F1|As1],Asvs,Asus,St1} = expr_list([M1,F|As0], Vs, St0),
+ {{call,Line,{remote,Lr,M2,F1},As1},Asvs,Asus,St1};
+expr({call,Line,{tuple,_,[{atom,_,_}=M,{atom,_,_}=F]},As}, Vs, St) ->
+ %% Rewrite {Mod,Function}(Args...) to Mod:Function(Args...).
+ expr({call,Line,{remote,Line,M,F},As}, Vs, St);
+expr({call,Line,F,As0}, Vs, St0) ->
+ {[Fun1|As1],Asvs,Asus,St1} = expr_list([F|As0], Vs, St0),
+ {{call,Line,Fun1,As1},Asvs,Asus,St1};
+expr({'try',Line,Es0,Scs0,Ccs0,As0}, Vs, St0) ->
+ {Es1,Esvs,Esus,St1} = exprs(Es0, Vs, St0),
+ Cvs = union(Esvs, Vs),
+ {Scs1,Scsvss,Scsuss,St2} = icr_clauses(Scs0, Cvs, St1),
+ {Ccs1,Ccsvss,Ccsuss,St3} = icr_clauses(Ccs0, Cvs, St2),
+ Csvss = Scsvss ++ Ccsvss,
+ Csuss = Scsuss ++ Ccsuss,
+ All = new_in_all(Vs, Csvss),
+ {As1,Asvs,Asus,St4} = exprs(As0, Cvs, St3),
+ {{'try',Line,Es1,Scs1,Ccs1,As1}, union([Asvs,Esvs,All]),
+ union([Esus,Asus|Csuss]), St4};
+expr({'catch',Line,E0}, Vs, St0) ->
+ %% Catch exports no new variables.
+ {E,_Evs,Eus,St1} = expr(E0, Vs, St0),
+ {{'catch',Line,E},[],Eus,St1};
+expr({match,Line,P0,E0}, Vs, St0) ->
+ {E,Evs,Eus,St1} = expr(E0, Vs, St0),
+ {P,Pvs,Pus,St2} = pattern(P0, St1),
+ {{match,Line,P,E},
+ union(subtract(Pvs, Vs), Evs),
+ union(intersection(Pvs, Vs), union(Eus,Pus)),St2};
+expr({op,L,'andalso',E1,E2}, Vs, St0) ->
+ {V,St1} = new_var(L,St0),
+ E = make_bool_switch(L,E1,V,
+ make_bool_switch(L,E2,V,{atom,L,true},
+ {atom,L,false}),
+ {atom,L,false}),
+ expr(E, Vs, St1);
+expr({op,L,'orelse',E1,E2}, Vs, St0) ->
+ {V,St1} = new_var(L,St0),
+ E = make_bool_switch(L,E1,V,{atom,L,true},
+ make_bool_switch(L,E2,V,{atom,L,true},
+ {atom,L,false})),
+ expr(E, Vs, St1);
+expr({op,Line,'++',{lc,Ll,E0,Qs0},M0}, Vs, St0) ->
+ {E1,Qs1,M1,Lvs,Lus,St1} = lc_tq(Ll, E0, Qs0, M0, Vs, St0),
+ {{op,Line,'++',{lc,Ll,E1,Qs1},M1},Lvs,Lus,St1};
+expr({op,_,'++',{string,L1,S1},{string,_,S2}}, _Vs, St) ->
+ {{string,L1,S1 ++ S2},[],[],St};
+expr({op,Ll,'++',{string,L1,S1}=Str,R0}, Vs, St0) ->
+ {R1,Rvs,Rus,St1} = expr(R0, Vs, St0),
+ E = case R1 of
+ {string,_,S2} -> {string,L1,S1 ++ S2};
+ _Other when length(S1) < 8 -> string_to_conses(L1, S1, R1);
+ _Other -> {op,Ll,'++',Str,R1}
+ end,
+ {E,Rvs,Rus,St1};
+expr({op,Ll,'++',{cons,Lc,H,T},L2}, Vs, St) ->
+ expr({cons,Ll,H,{op,Lc,'++',T,L2}}, Vs, St);
+expr({op,_,'++',{nil,_},L2}, Vs, St) ->
+ expr(L2, Vs, St);
+expr({op,Line,Op,A0}, Vs, St0) ->
+ {A,Avs,Aus,St1} = expr(A0, Vs, St0),
+ {{op,Line,Op,A},Avs,Aus,St1};
+expr({op,Line,Op,L0,R0}, Vs, St0) ->
+ {L,Lvs,Lus,St1} = expr(L0, Vs, St0),
+ {R,Rvs,Rus,St2} = expr(R0, Vs, St1),
+ {{op,Line,Op,L,R},union(Lvs, Rvs),union(Lus, Rus),St2}.
+
+expr_list([E0|Es0], Vs, St0) ->
+ {E,Evs,Eus,St1} = expr(E0, Vs, St0),
+ {Es,Esvs,Esus,St2} = expr_list(Es0, Vs, St1),
+ {[E|Es],union(Evs, Esvs),union(Eus, Esus),St2};
+expr_list([], _, St) ->
+ {[],[],[],St}.
+
+%% icr_clauses([Clause], [VisibleVariable], State) ->
+%% {[TransformedClause],[[NewVariable]],[[UsedVariable]],State'}
+%% Be very careful here to return the variables that are really used
+%% and really new.
+
+icr_clauses([], _, St) ->
+ {[],[[]],[],St};
+icr_clauses(Clauses, Vs, St) ->
+ icr_clauses2(Clauses, Vs, St).
+
+icr_clauses2([{clause,Line,H0,G0,B0}|Cs0], Vs, St0) ->
+ {H,Hvs,Hus,St1} = head(H0, St0), %Hvs is really used!
+ {G,Gvs,Gus,St2} = guard(G0, union(Hvs, Vs), St1),
+ {B,Bvs,Bus,St3} = exprs(B0, union([Vs,Hvs,Gvs]), St2),
+ New = subtract(union([Hvs,Gvs,Bvs]), Vs), %Really new
+ Used = intersection(union([Hvs,Hus,Gus,Bus]), Vs), %Really used
+ {Cs,Csvs,Csus,St4} = icr_clauses2(Cs0, Vs, St3),
+ {[{clause,Line,H,G,B}|Cs],[New|Csvs],[Used|Csus],St4};
+icr_clauses2([], _, St) ->
+ {[],[],[],St}.
+
+%% lc_tq(Line, Expr, Qualifiers, More, [VisibleVar], State) ->
+%% {TransExpr,[TransQual],TransMore,[NewVar],[UsedVar],State'}
+
+lc_tq(Line, E0, [{generate,Lg,P0,G0}|Qs0], M0, Vs, St0) ->
+ {G1,Gvs,Gus,St1} = expr(G0, Vs, St0),
+ {P1,Pvs,Pus,St2} = pattern(P0, St1),
+ {E1,Qs1,M1,Lvs,Lus,St3} = lc_tq(Line, E0, Qs0, M0, union(Pvs, Vs), St2),
+ {E1,[{generate,Lg,P1,G1}|Qs1],M1,
+ union(Gvs, Lvs),union([Gus,Pus,Lus]),St3};
+lc_tq(Line, E0, [F0|Qs0], M0, Vs, St0) ->
+ %% Allow record/2 and expand out as guard test.
+ case erl_lint:is_guard_test(F0) of
+ true ->
+ {F1,Fvs,_Fus,St1} = guard_tests([F0], Vs, St0),
+ {E1,Qs1,M1,Lvs,Lus,St2} = lc_tq(Line, E0, Qs0, M0, union(Fvs, Vs), St1),
+ {E1,F1++Qs1,M1,Lvs,Lus,St2};
+ false ->
+ {F1,Fvs,_Fus,St1} = expr(F0, Vs, St0),
+ {E1,Qs1,M1,Lvs,Lus,St2} = lc_tq(Line, E0, Qs0, M0, union(Fvs, Vs), St1),
+ {E1,[F1|Qs1],M1,Lvs,Lus,St2}
+ end;
+lc_tq(_Line, E0, [], M0, Vs, St0) ->
+ {E1,Evs,Eus,St1} = expr(E0, Vs, St0),
+ {M1,Mvs,Mus,St2} = expr(M0, Vs, St1),
+ {E1,[],M1,union(Evs, Mvs),union(Eus, Mus),St2}.
+
+%% fun_tq(Line, Body, VisibleVariables, State) ->
+%% {Fun,NewVariables,UsedVariables,State'}
+%% Transform an "explicit" fun {'fun', Line, {clauses, Cs}} into an
+%% extended form {'fun', Line, {clauses, Cs}, Info}, unless it is the
+%% name of a BIF (erl_lint has checked that it is not an import).
+%% Process the body sequence directly to get the new and used variables.
+%% "Implicit" funs {'fun', Line, {function, F, A}} are not changed.
+
+fun_tq(Lf, {function,F,A}, Vs, St0) ->
+ {As,St1} = new_vars(A, Lf, St0),
+ Cs = [{clause,Lf,As,[],[{call,Lf,{atom,Lf,F},As}]}],
+ case erl_internal:bif(F, A) of
+ true ->
+ fun_tq(Lf, {clauses,Cs}, Vs, St1);
+ false ->
+ Index = St0#expand.fun_index,
+ Uniq = erlang:hash(Cs, (1 bsl 27)-1),
+ {Fname,St2} = new_fun_name(St1),
+ {{'fun',Lf,{function,F,A},{Index,Uniq,Fname}},[],[],
+ St2#expand{fun_index=Index+1}}
+ end;
+fun_tq(Lf, {clauses,Cs0}, Vs, St0) ->
+ Uniq = erlang:hash(Cs0, (1 bsl 27)-1),
+ {Cs1,_Hvss,Frees,St1} = fun_clauses(Cs0, Vs, St0),
+ Ufrees = union(Frees),
+ Index = St1#expand.fun_index,
+ {Fname,St2} = new_fun_name(St1),
+ {{'fun',Lf,{clauses,Cs1},{Index,Uniq,Fname}},[],Ufrees,
+ St2#expand{fun_index=Index+1}}.
+
+fun_clauses([{clause,L,H0,G0,B0}|Cs0], Vs, St0) ->
+ {H,Hvs,Hus,St1} = head(H0, St0),
+ {G,Gvs,Gus,St2} = guard(G0, union(Hvs, Vs), St1),
+ {B,Bvs,Bus,St3} = exprs(B0, union([Vs,Hvs,Gvs]), St2),
+ %% Free variables cannot be new anywhere in the clause.
+ Free = subtract(union([Gus,Hus,Bus]), union([Hvs,Gvs,Bvs])),
+ %%io:format(" Gus :~p~n Bvs :~p~n Bus :~p~n Free:~p~n" ,[Gus,Bvs,Bus,Free]),
+ {Cs,Hvss,Frees,St4} = fun_clauses(Cs0, Vs, St3),
+ {[{clause,L,H,G,B}|Cs],[Hvs|Hvss],[Free|Frees],St4};
+fun_clauses([], _, St) -> {[],[],[],St}.
+
+%% new_fun_name(State) -> {FunName,State}.
+
+new_fun_name(#expand{func=F,arity=A,fcount=I}=St) ->
+ Name = "-" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A)
+ ++ "-fun-" ++ integer_to_list(I) ++ "-",
+ {list_to_atom(Name),St#expand{fcount=I+1}}.
+
+
+%% normalise_fields([RecDef]) -> [Field].
+%% Normalise the field definitions to always have a default value. If
+%% none has been given then use 'undefined'.
+
+normalise_fields(Fs) ->
+ map(fun ({record_field,Lf,Field}) ->
+ {record_field,Lf,Field,{atom,Lf,undefined}};
+ (F) -> F end, Fs).
+
+%% record_fields(RecordName, State)
+%% find_field(FieldName, Fields)
+
+record_fields(R, St) -> dict:fetch(R, St#expand.records).
+
+find_field(F, [{record_field,_,{atom,_,F},Val}|_]) -> {ok,Val};
+find_field(F, [_|Fs]) -> find_field(F, Fs);
+find_field(_, []) -> error.
+
+%% field_names(RecFields) -> [Name].
+%%  Return a list of the field name structures.
+
+field_names(Fs) ->
+ map(fun ({record_field,_,Field,_Val}) -> Field end, Fs).
+
+%% index_expr(Line, FieldExpr, Name, Fields) -> IndexExpr.
+%% Return an expression which evaluates to the index of a
+%% field. Currently only handle the case where the field is an
+%% atom. This expansion must be passed through expr again.
+
+index_expr(Line, {atom,_,F}, _Name, Fs) ->
+ {integer,Line,index_expr(F, Fs, 2)}.
+
+index_expr(F, [{record_field,_,{atom,_,F},_}|_], I) -> I;
+index_expr(F, [_|Fs], I) ->
+ index_expr(F, Fs, I+1).
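+
+%% Illustrative note (added, not part of the original module): for
+%% -record(r, {a, b, c}) the expression #r.b expands to {integer,Line,3},
+%% since element 1 of the record tuple holds the record tag 'r'.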
+
+%% pattern_fields([RecDefField], [Match]) -> [Pattern].
+%% Build a list of match patterns for the record tuple elements.
+%% This expansion must be passed through pattern again. N.B. We are
+%% scanning the record definition field list!
+
+pattern_fields(Fs, Ms) ->
+ Wildcard = record_wildcard_init(Ms),
+ map(fun ({record_field,L,{atom,_,F},_}) ->
+ case find_field(F, Ms) of
+ {ok,Match} -> Match;
+ error when Wildcard =:= none -> {var,L,'_'};
+ error -> Wildcard
+ end end,
+ Fs).
+
+%% record_inits([RecDefField], [Init]) -> [InitExpr].
+%% Build a list of initialisation expressions for the record tuple
+%% elements. This expansion must be passed through expr
+%% again. N.B. We are scanning the record definition field list!
+
+record_inits(Fs, Is) ->
+ WildcardInit = record_wildcard_init(Is),
+ map(fun ({record_field,_,{atom,_,F},D}) ->
+ case find_field(F, Is) of
+ {ok,Init} -> Init;
+ error when WildcardInit =:= none -> D;
+ error -> WildcardInit
+ end end,
+ Fs).
+
+record_wildcard_init([{record_field,_,{var,_,'_'},D}|_]) -> D;
+record_wildcard_init([_|Is]) -> record_wildcard_init(Is);
+record_wildcard_init([]) -> none.
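+
+%% Illustrative note (added, not part of the original module): given
+%% -record(r, {a, b = 1, c}), the construction #r{a = 5, _ = 0} initialises
+%% 'a' to 5 and both 'b' and 'c' to the wildcard value 0, overriding the
+%% declared default of 'b'.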
+
+%% record_update(Record, RecordName, [RecDefField], [Update], State) ->
+%% {Expr,State'}
+%% Build an expression to update fields in a record returning a new
+%% record. Try to be smart and optimise this. This expansion must be
+%% passed through expr again.
+
+record_update(R, Name, Fs, Us0, St0) ->
+ Line = element(2, R),
+ {Pre,Us,St1} = record_exprs(Us0, St0),
+ Nf = length(Fs), %# of record fields
+ Nu = length(Us), %# of update fields
+ Nc = Nf - Nu, %# of copy fields
+
+ %% We need a new variable for the record expression
+ %% to guarantee that it is only evaluated once.
+ {Var,St2} = new_var(Line, St1),
+
+ %% Try to be intelligent about which method of updating record to use.
+ {Update,St} =
+ if
+ Nu == 0 -> {R,St2}; %No fields updated
+ Nu =< Nc -> %Few fields updated
+ {record_setel(Var, Name, Fs, Us), St2};
+	    true ->				%The wide area in between
+ record_match(Var, Name, Fs, Us, St2)
+ end,
+ {{block,element(2, R),Pre ++ [{match,Line,Var,R},Update]},St}.
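+
+%% Illustrative note (added, not part of the original module): for a record
+%% with 10 fields, updating no fields returns the record expression as is,
+%% updating 2 fields (Nu =< Nc) generates a setelement/3 chain via
+%% record_setel/4, and updating 8 fields rebuilds the whole tuple via
+%% record_match/5.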
+
+%% record_match(Record, RecordName, [RecDefField], [Update], State)
+%% Build a 'case' expression to modify record fields.
+
+record_match(R, Name, Fs, Us, St0) ->
+ {Ps,News,St1} = record_upd_fs(Fs, Us, St0),
+ Lr = element(2, hd(Us)),
+ {{'case',Lr,R,
+ [{clause,Lr,[{tuple,Lr,[{atom,Lr,Name}|Ps]}],[],
+ [{tuple,Lr,[{atom,Lr,Name}|News]}]},
+ {clause,Lr,[{var,Lr,'_'}],[],
+ [call_error(Lr, {tuple,Lr,[{atom,Lr,badrecord},{atom,Lr,Name}]})]}
+ ]},
+ St1}.
+
+record_upd_fs([{record_field,Lf,{atom,_La,F},_Val}|Fs], Us, St0) ->
+ {P,St1} = new_var(Lf, St0),
+ {Ps,News,St2} = record_upd_fs(Fs, Us, St1),
+ case find_field(F, Us) of
+ {ok,New} -> {[P|Ps],[New|News],St2};
+ error -> {[P|Ps],[P|News],St2}
+ end;
+record_upd_fs([], _, St) -> {[],[],St}.
+
+%% record_setel(Record, RecordName, [RecDefField], [Update])
+%% Build a nested chain of setelement calls to build the
+%% updated record tuple.
+
+record_setel(R, Name, Fs, Us0) ->
+ Us1 = foldl(fun ({record_field,Lf,Field,Val}, Acc) ->
+ I = index_expr(Lf, Field, Name, Fs),
+ [{I,Lf,Val}|Acc]
+ end, [], Us0),
+ Us = sort(Us1),
+ Lr = element(2, hd(Us)),
+ Wildcards = duplicate(length(Fs), {var,Lr,'_'}),
+ {'case',Lr,R,
+ [{clause,Lr,[{tuple,Lr,[{atom,Lr,Name}|Wildcards]}],[],
+ [foldr(fun ({I,Lf,Val}, Acc) ->
+ {call,Lf,{atom,Lf,setelement},[I,Acc,Val]} end,
+ R, Us)]},
+ {clause,Lr,[{var,Lr,'_'}],[],
+ [call_error(Lr, {tuple,Lr,[{atom,Lr,badrecord},{atom,Lr,Name}]})]}]}.
+
+%% Expand a call to record_info/2. We have checked that it is not
+%% shadowed by an import.
+
+record_info_call(Line, [{atom,_Li,Info},{atom,_Ln,Name}], St) ->
+ case Info of
+ size ->
+ {{integer,Line,1+length(record_fields(Name, St))},[],[],St};
+ fields ->
+ {make_list(field_names(record_fields(Name, St)), Line),
+ [],[],St}
+ end.
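+
+%% Illustrative note (added, not part of the original module): with
+%% -record(r, {a, b}) in scope, record_info(size, r) expands to
+%% {integer,Line,3} and record_info(fields, r) to the abstract list [a,b]
+%% built by make_list/2.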
+
+%% Break out expressions from a record update list and bind to new
+%% variables. The idea is that we will evaluate all update expressions
+%% before starting to update the record.
+
+record_exprs(Us, St) ->
+ record_exprs(Us, St, [], []).
+
+record_exprs([{record_field,Lf,{atom,_La,_F}=Name,Val}=Field0|Us], St0, Pre, Fs) ->
+ case is_simple_val(Val) of
+ true ->
+ record_exprs(Us, St0, Pre, [Field0|Fs]);
+ false ->
+ {Var,St} = new_var(Lf, St0),
+ Bind = {match,Lf,Var,Val},
+ Field = {record_field,Lf,Name,Var},
+ record_exprs(Us, St, [Bind|Pre], [Field|Fs])
+ end;
+record_exprs([], St, Pre, Fs) ->
+ {reverse(Pre),Fs,St}.
+
+is_simple_val({var,_,_}) -> true;
+is_simple_val({atom,_,_}) -> true;
+is_simple_val({integer,_,_}) -> true;
+is_simple_val({float,_,_}) -> true;
+is_simple_val({nil,_}) -> true;
+is_simple_val(_) -> false.
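+
+%% Illustrative note (added, not part of the original module): in
+%% R#r{a = f(X)} the value f(X) is not simple, so it is first bound to a
+%% fresh compiler-generated variable (see new_var/2) and the update field
+%% then refers to that variable.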
+
+%% pattern_bin([Element], State) -> {[Element],[Variable],[UsedVar],State}.
+
+pattern_bin(Es0, St) ->
+ Es1 = bin_expand_strings(Es0),
+ foldr(fun (E, Acc) -> pattern_element(E, Acc) end, {[],[],[],St}, Es1).
+
+pattern_element({bin_element,Line,Expr,Size,Type}, {Es,Esvs,Esus,St0}) ->
+ {Expr1,Vs1,Us1,St1} = pattern(Expr, St0),
+ {Size1,Vs2,Us2,St2} = pat_bit_size(Size, St1),
+ {Size2,Type1} = make_bit_type(Line, Size1,Type),
+ {[{bin_element,Line,Expr1,Size2,Type1}|Es],
+ union([Vs1,Vs2,Esvs]),union([Us1,Us2,Esus]),St2}.
+
+pat_bit_size(default, St) -> {default,[],[],St};
+pat_bit_size({atom,_La,all}=All, St) -> {All,[],[],St};
+pat_bit_size({var,_Lv,V}=Var, St) -> {Var,[],[V],St};
+pat_bit_size(Size, St) ->
+ Line = element(2, Size),
+ {value,Sz,_} = erl_eval:expr(Size, erl_eval:new_bindings()),
+ {{integer,Line,Sz},[],[],St}.
+
+make_bit_type(Line, default, Type0) ->
+ case erl_bits:set_bit_type(default, Type0) of
+ {ok,all,Bt} -> {{atom,Line,all},erl_bits:as_list(Bt)};
+ {ok,Size,Bt} -> {{integer,Line,Size},erl_bits:as_list(Bt)}
+ end;
+make_bit_type(_Line, Size, Type0) -> %Integer or 'all'
+ {ok,Size,Bt} = erl_bits:set_bit_type(Size, Type0),
+ {Size,erl_bits:as_list(Bt)}.
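+
+%% Illustrative note (added, not part of the original module; defaults as
+%% implemented by erl_bits): a segment written without an explicit size
+%% gets its default here, e.g. 8 bits for an integer segment, 64 for a
+%% float and 'all' for a trailing binary segment.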
+
+%% expr_bin([Element], [VisibleVar], State) ->
+%% {[Element],[NewVar],[UsedVar],State}.
+
+expr_bin(Es0, Vs, St) ->
+ Es1 = bin_expand_strings(Es0),
+ foldr(fun (E, Acc) -> bin_element(E, Vs, Acc) end, {[],[],[],St}, Es1).
+
+bin_element({bin_element,Line,Expr,Size,Type}, Vs, {Es,Esvs,Esus,St0}) ->
+ {Expr1,Vs1,Us1,St1} = expr(Expr, Vs, St0),
+ {Size1,Vs2,Us2,St2} = if Size == default -> {default,[],[],St1};
+ true -> expr(Size, Vs, St1)
+ end,
+ {Size2,Type1} = make_bit_type(Line, Size1, Type),
+ {[{bin_element,Line,Expr1,Size2,Type1}|Es],
+ union([Vs1,Vs2,Esvs]),union([Us1,Us2,Esus]),St2}.
+
+bin_expand_strings(Es) ->
+ foldr(fun ({bin_element,Line,{string,_,S},default,default}, Es1) ->
+ foldr(fun (C, Es2) ->
+ [{bin_element,Line,{char,Line,C},default,default}|Es2]
+ end, Es1, S);
+ (E, Es1) -> [E|Es1]
+ end, [], Es).
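+
+%% Illustrative note (added, not part of the original module): a string
+%% segment such as <<"ab",X>> is expanded so that "ab" becomes separate
+%% char segments, i.e. <<$a,$b,X>>, before the elements are processed.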
+
+%% new_var_name(State) -> {VarName,State}.
+
+new_var_name(St) ->
+ C = St#expand.vcount,
+ {list_to_atom("pre" ++ integer_to_list(C)),St#expand{vcount=C+1}}.
+
+%% new_var(Line, State) -> {Var,State}.
+
+new_var(L, St0) ->
+ {New,St1} = new_var_name(St0),
+ {{var,L,New},St1}.
+
+%% new_vars(Count, Line, State) -> {[Var],State}.
+%% Make Count new variables.
+
+new_vars(N, L, St) -> new_vars(N, L, St, []).
+
+new_vars(N, L, St0, Vs) when N > 0 ->
+ {V,St1} = new_var(L, St0),
+ new_vars(N-1, L, St1, [V|Vs]);
+new_vars(0, _L, St, Vs) -> {Vs,St}.
+
+%% make_list(TermList, Line) -> ConsTerm.
+
+make_list(Ts, Line) ->
+ foldr(fun (H, T) -> {cons,Line,H,T} end, {nil,Line}, Ts).
+
+string_to_conses(Line, Cs, Tail) ->
+ foldr(fun (C, T) -> {cons,Line,{char,Line,C},T} end, Tail, Cs).
+
+
+%% In syntax trees, module/package names are atoms or lists of atoms.
+
+package_to_string(A) when atom(A) -> atom_to_list(A);
+package_to_string(L) when list(L) -> packages:concat(L).
+
+expand_package({atom,L,A} = M, St) ->
+ case dict:find(A, St#expand.mod_imports) of
+ {ok, A1} ->
+ {atom,L,A1};
+ error ->
+ case packages:is_segmented(A) of
+ true ->
+ M;
+ false ->
+ M1 = packages:concat(St#expand.package, A),
+ {atom,L,list_to_atom(M1)}
+ end
+ end;
+expand_package(M, _St) ->
+ case erl_parse:package_segments(M) of
+ error ->
+ M;
+ M1 ->
+ {atom,element(2,M),list_to_atom(package_to_string(M1))}
+ end.
+
+%% Create a case-switch on true/false, generating badarg for all other
+%% values.
+
+make_bool_switch(L, E, V, T, F) ->
+ make_bool_switch_1(L, E, V, [T], [F]).
+
+make_bool_switch_1(L, E, V, T, F) ->
+ case get(sys_pre_expand_in_guard) of
+ undefined -> make_bool_switch_body(L, E, V, T, F);
+ yes -> make_bool_switch_guard(L, E, V, T, F)
+ end.
+
+make_bool_switch_guard(_, E, _, [{atom,_,true}], [{atom,_,false}]) -> E;
+make_bool_switch_guard(L, E, V, T, F) ->
+ NegL = -abs(L),
+ {'case',NegL,E,
+ [{clause,NegL,[{atom,NegL,true}],[],T},
+ {clause,NegL,[{atom,NegL,false}],[],F},
+ {clause,NegL,[V],[],[V]}
+ ]}.
+
+make_bool_switch_body(L, E, V, T, F) ->
+ NegL = -abs(L),
+ {'case',NegL,E,
+ [{clause,NegL,[{atom,NegL,true}],[],T},
+ {clause,NegL,[{atom,NegL,false}],[],F},
+ {clause,NegL,[V],[],
+ [call_error(NegL,{tuple,NegL,[{atom,NegL,badarg},V]})]}
+ ]}.
+
+%% Expand a list of cond-clauses to a sequence of case-switches.
+
+cond_clauses([{clause,L,[],[[E]],B}],V) ->
+ make_bool_switch_1(L,E,V,B,[call_error(L,{atom,L,cond_clause})]);
+cond_clauses([{clause,L,[],[[E]],B} | Cs],V) ->
+ make_bool_switch_1(L,E,V,B,[cond_clauses(Cs,V)]).
+
+%% call_error(Line, Reason) -> Expr.
+%% Build a call to erlang:error/1 with reason Reason.
+
+call_error(L, R) ->
+ {call,L,{remote,L,{atom,L,erlang},{atom,L,error}},[R]}.
+
+%% new_in_all(Before, RegionList) -> NewInAll
+%% Return the variables new in all clauses.
+
+new_in_all(Before, Region) ->
+ InAll = intersection(Region),
+ subtract(InAll, Before).
+
+%% import(Line, Imports, State) ->
+%% State'
+%% imported(Name, Arity, State) ->
+%% {yes,Module} | no
+%% Handle import declarations and test for imported functions. No need to
+%% check when building imports as code is correct.
+
+import({Mod0,Fs}, St) ->
+ Mod = list_to_atom(package_to_string(Mod0)),
+ Mfs = from_list(Fs),
+ St#expand{imports=add_imports(Mod, Mfs, St#expand.imports)};
+import(Mod0, St) ->
+ Mod = package_to_string(Mod0),
+ Key = list_to_atom(packages:last(Mod)),
+ St#expand{mod_imports=dict:store(Key, list_to_atom(Mod),
+ St#expand.mod_imports)}.
+
+add_imports(Mod, [F|Fs], Is) ->
+ add_imports(Mod, Fs, orddict:store(F, Mod, Is));
+add_imports(_, [], Is) -> Is.
+
+imported(F, A, St) ->
+ case orddict:find({F,A}, St#expand.imports) of
+ {ok,Mod} -> {yes,Mod};
+ error -> no
+ end.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_codegen.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_codegen.erl
new file mode 100644
index 0000000000..2af4d94655
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_codegen.erl
@@ -0,0 +1,1755 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_codegen.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Code generator for Beam.
+
+%% The following assumptions have been made:
+%%
+%% 1. Matches, i.e. things with {match,M,Ret} wrappers, only return
+%% values; no variables are exported. If the match would have returned
+%% extra variables then these have been transformed to multiple return
+%% values.
+%%
+%% 2. All BIFs called in guards are gc-safe, so there is no need to
+%% put things on the stack in the guard. While this would in principle
+%% work it would be difficult to keep track of the stack depth when
+%% trimming.
+%%
+%% The code generation uses variable lifetime information added by
+%% the v3_life module to save variables, allocate registers and
+%% move registers to the stack when necessary.
+%%
+%% We try to use a consistent variable name scheme throughout. The
+%% StackReg record is always called Bef,Int<n>,Aft.
+
+-module(v3_codegen).
+
+%% The main interface.
+-export([module/2]).
+
+-import(lists, [member/2,keymember/3,keysort/2,keysearch/3,append/1,
+ map/2,flatmap/2,foldl/3,foldr/3,mapfoldl/3,
+ sort/1,reverse/1,reverse/2]).
+-import(v3_life, [vdb_find/2]).
+
+%%-compile([export_all]).
+
+-include("v3_life.hrl").
+
+%% Main codegen structure.
+-record(cg, {lcount=1, %Label counter
+ mod, %Current module
+ func, %Current function
+ finfo, %Function info label
+ fcode, %Function code label
+ btype, %Type of bif used.
+ bfail, %Fail label of bif
+ break, %Break label
+ recv, %Receive label
+ is_top_block, %Boolean: top block or not
+ functable = [], %Table of local functions:
+ %[{{Name, Arity}, Label}...]
+ in_catch=false, %Inside a catch or not.
+ need_frame, %Need a stack frame.
+ new_funs=true}). %Generate new fun instructions.
+
+%% Stack/register state record.
+-record(sr, {reg=[], %Register table
+ stk=[], %Stack table
+ res=[]}). %Reserved regs: [{reserved,I,V}]
+
+module({Mod,Exp,Attr,Forms}, Options) ->
+ NewFunsFlag = not member(no_new_funs, Options),
+ {Fs,St} = functions(Forms, #cg{mod=Mod,new_funs=NewFunsFlag}),
+ {ok,{Mod,Exp,Attr,Fs,St#cg.lcount}}.
+
+functions(Forms, St0) ->
+ mapfoldl(fun (F, St) -> function(F, St) end, St0#cg{lcount=1}, Forms).
+
+function({function,Name,Arity,As0,Vb,Vdb}, St0) ->
+ %%ok = io:fwrite("cg ~w:~p~n", [?LINE,{Name,Arity}]),
+ St1 = St0#cg{func={Name,Arity}},
+ {Fun,St2} = cg_fun(Vb, As0, Vdb, St1),
+ Func0 = {function,Name,Arity,St2#cg.fcode,Fun},
+ Func = bs_function(Func0),
+ {Func,St2}.
+
+%% cg_fun([Lkexpr], [HeadVar], Vdb, State) -> {[Ainstr],State}
+
+cg_fun(Les, Hvs, Vdb, St0) ->
+ {Name,Arity} = St0#cg.func,
+ {Fi,St1} = new_label(St0), %FuncInfo label
+ {Fl,St2} = local_func_label(Name, Arity, St1),
+ %% Create initial stack/register state, clear unused arguments.
+ Bef = clear_dead(#sr{reg=foldl(fun ({var,V}, Reg) ->
+ put_reg(V, Reg)
+ end, [], Hvs),
+ stk=[]}, 0, Vdb),
+ {B2,_Aft,St3} = cg_list(Les, 0, Vdb, Bef, St2#cg{btype=exit,
+ bfail=Fi,
+ finfo=Fi,
+ fcode=Fl,
+ is_top_block=true}),
+ A = [{label,Fi},{func_info,{atom,St3#cg.mod},{atom,Name},Arity},
+ {label,Fl}|B2],
+ {A,St3}.
+
+%% cg(Lkexpr, Vdb, StackReg, State) -> {[Ainstr],StackReg,State}.
+%% Generate code for a kexpr.
+%% Split function into two steps for clarity, not efficiency.
+
+cg(Le, Vdb, Bef, St) ->
+ cg(Le#l.ke, Le, Vdb, Bef, St).
+
+cg({block,Es}, Le, Vdb, Bef, St) ->
+ block_cg(Es, Le, Vdb, Bef, St);
+cg({match,M,Rs}, Le, Vdb, Bef, St) ->
+ match_cg(M, Rs, Le, Vdb, Bef, St);
+cg({match_fail,F}, Le, Vdb, Bef, St) ->
+ match_fail_cg(F, Le, Vdb, Bef, St);
+cg({call,Func,As,Rs}, Le, Vdb, Bef, St) ->
+ call_cg(Func, As, Rs, Le, Vdb, Bef, St);
+cg({enter,Func,As}, Le, Vdb, Bef, St) ->
+ enter_cg(Func, As, Le, Vdb, Bef, St);
+cg({bif,Bif,As,Rs}, Le, Vdb, Bef, St) ->
+ bif_cg(Bif, As, Rs, Le, Vdb, Bef, St);
+cg({receive_loop,Te,Rvar,Rm,Tes,Rs}, Le, Vdb, Bef, St) ->
+ recv_loop_cg(Te, Rvar, Rm, Tes, Rs, Le, Vdb, Bef, St);
+cg(receive_next, Le, Vdb, Bef, St) ->
+ recv_next_cg(Le, Vdb, Bef, St);
+cg(receive_accept, _Le, _Vdb, Bef, St) -> {[remove_message],Bef,St};
+cg({'try',Ta,Vs,Tb,Evs,Th,Rs}, Le, Vdb, Bef, St) ->
+ try_cg(Ta, Vs, Tb, Evs, Th, Rs, Le, Vdb, Bef, St);
+cg({'catch',Cb,R}, Le, Vdb, Bef, St) ->
+ catch_cg(Cb, R, Le, Vdb, Bef, St);
+cg({set,Var,Con}, Le, Vdb, Bef, St) -> set_cg(Var, Con, Le, Vdb, Bef, St);
+cg({return,Rs}, Le, Vdb, Bef, St) -> return_cg(Rs, Le, Vdb, Bef, St);
+cg({break,Bs}, Le, Vdb, Bef, St) -> break_cg(Bs, Le, Vdb, Bef, St);
+cg({need_heap,0}, _Le, _Vdb, Bef, St) ->
+ {[],Bef,St};
+cg({need_heap,H}, _Le, _Vdb, Bef, St) ->
+ {[{test_heap,H,max_reg(Bef#sr.reg)}],Bef,St}.
+
+%% cg_list([Kexpr], FirstI, Vdb, StackReg, St) -> {[Ainstr],StackReg,St}.
+
+cg_list(Kes, I, Vdb, Bef, St0) ->
+ {Keis,{Aft,St1}} =
+ flatmapfoldl(fun (Ke, {Inta,Sta}) ->
+% ok = io:fwrite(" %% ~p\n", [Inta]),
+% ok = io:fwrite("cgl:~p\n", [Ke]),
+ {Keis,Intb,Stb} = cg(Ke, Vdb, Inta, Sta),
+% ok = io:fwrite(" ~p\n", [Keis]),
+% ok = io:fwrite(" %% ~p\n", [Intb]),
+ {comment(Inta) ++ Keis,{Intb,Stb}}
+ end, {Bef,St0}, need_heap(Kes, I)),
+ {Keis,Aft,St1}.
+
+%% need_heap([Lkexpr], I) -> [Lkexpr].
+%% Insert need_heap instructions in Kexpr list. Try to be smart and
+%% collect them together as much as possible.
+
+need_heap(Kes0, I) ->
+ {Kes1,{H,F}} = flatmapfoldr(fun (Ke, {H0,F0}) ->
+ {Ns,H1,F1} = need_heap_1(Ke, H0, F0),
+ {[Ke|Ns],{H1,F1}}
+ end, {0,false}, Kes0),
+ %% Prepend need_heap if necessary.
+ Kes2 = need_heap_need(I, H, F) ++ Kes1,
+% ok = io:fwrite("need_heap: ~p~n",
+% [{{H,F},
+% map(fun (#l{ke={match,M,Rs}}) -> match;
+% (Lke) -> Lke#l.ke end, Kes2)}]),
+ Kes2.
+
+need_heap_1(#l{ke={set,_,{binary,_}},i=I}, H, F) ->
+ {need_heap_need(I, H, F),0,false};
+need_heap_1(#l{ke={set,_,Val}}, H, F) ->
+ %% Just pass through adding to needed heap.
+ {[],H + case Val of
+ {cons,_} -> 2;
+ {tuple,Es} -> 1 + length(Es);
+ {string,S} -> 2 * length(S);
+ _Other -> 0
+ end,F};
+need_heap_1(#l{ke={call,_Func,_As,_Rs},i=I}, H, F) ->
+ %% Calls generate a need if necessary and also force one.
+ {need_heap_need(I, H, F),0,true};
+need_heap_1(#l{ke={bif,dsetelement,_As,_Rs},i=I}, H, F) ->
+ {need_heap_need(I, H, F),0,true};
+need_heap_1(#l{ke={bif,{make_fun,_,_,_,_},_As,_Rs},i=I}, H, F) ->
+ {need_heap_need(I, H, F),0,true};
+need_heap_1(#l{ke={bif,_Bif,_As,_Rs}}, H, F) ->
+ {[],H,F};
+need_heap_1(#l{i=I}, H, F) ->
+    %% Other kexprs generate a need if necessary but don't force.
+ {need_heap_need(I, H, F),0,false}.
+
+need_heap_need(_I, 0, false) -> [];
+need_heap_need(I, H, _F) -> [#l{ke={need_heap,H},i=I}].
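+
+%% Illustrative note (added, not part of the original module): two
+%% consecutive 'set' kexprs building a cons (2 words) and a 3-tuple
+%% (1+3 words) are covered by a single {need_heap,6} emitted just before
+%% them; a call resets the count, so needs are never carried across a call.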
+
+
+%% match_cg(Match, [Ret], Le, Vdb, StackReg, State) ->
+%% {[Ainstr],StackReg,State}.
+%% Generate code for a match. First save all variables on the stack
+%% that are to survive after the match. We leave saved variables in
+%% their registers as they might actually be in the right place.
+%% Should test this.
+
+match_cg(M, Rs, Le, Vdb, Bef, St0) ->
+ I = Le#l.i,
+ {Sis,Int0} = adjust_stack(Bef, I, I+1, Vdb),
+ {B,St1} = new_label(St0),
+ {Mis,Int1,St2} = match_cg(M, none, Int0, St1#cg{break=B}),
+ %% Put return values in registers.
+ Reg = load_vars(Rs, Int1#sr.reg),
+ {Sis ++ Mis ++ [{label,B}],
+ clear_dead(Int1#sr{reg=Reg}, I, Vdb),
+ St2#cg{break=St1#cg.break}}.
+
+%% match_cg(Match, Fail, StackReg, State) -> {[Ainstr],StackReg,State}.
+%%  Generate code for a match tree. N.B. there is no need to pass Vdb
+%%  down, as each level which uses this takes its own internal Vdb, not
+%%  the outer one.
+
+match_cg(Le, Fail, Bef, St) ->
+ match_cg(Le#l.ke, Le, Fail, Bef, St).
+
+match_cg({alt,F,S}, _Le, Fail, Bef, St0) ->
+ {Tf,St1} = new_label(St0),
+ {Fis,Faft,St2} = match_cg(F, Tf, Bef, St1),
+ {Sis,Saft,St3} = match_cg(S, Fail, Bef, St2),
+ Aft = sr_merge(Faft, Saft),
+ {Fis ++ [{label,Tf}] ++ Sis,Aft,St3};
+match_cg({select,V,Scs}, _Va, Fail, Bef, St) ->
+ match_fmf(fun (S, F, Sta) ->
+ select_cg(S, V, F, Fail, Bef, Sta) end,
+ Fail, St, Scs);
+match_cg({guard,Gcs}, _Le, Fail, Bef, St) ->
+ match_fmf(fun (G, F, Sta) -> guard_clause_cg(G, F, Bef, Sta) end,
+ Fail, St, Gcs);
+match_cg({block,Es}, Le, _Fail, Bef, St) ->
+ %% Must clear registers and stack of dead variables.
+ Int = clear_dead(Bef, Le#l.i, Le#l.vdb),
+ block_cg(Es, Le, Int, St).
+
+%% match_fail_cg(FailReason, Le, Vdb, StackReg, State) ->
+%% {[Ainstr],StackReg,State}.
+%% Generate code for the match_fail "call". N.B. there is no generic
+%% case for when the fail value has been created elsewhere.
+
+match_fail_cg({function_clause,As}, Le, Vdb, Bef, St) ->
+ %% Must have the args in {x,0}, {x,1},...
+ {Sis,Int} = cg_setup_call(As, Bef, Le#l.i, Vdb),
+ {Sis ++ [{jump,{f,St#cg.finfo}}],
+ Int#sr{reg=clear_regs(Int#sr.reg)},St};
+match_fail_cg({badmatch,Term}, Le, Vdb, Bef, St) ->
+ R = cg_reg_arg(Term, Bef),
+ Int0 = clear_dead(Bef, Le#l.i, Vdb),
+ {Sis,Int} = adjust_stack(Int0, Le#l.i, Le#l.i+1, Vdb),
+ {Sis ++ [{badmatch,R}],
+ Int#sr{reg=clear_regs(Int0#sr.reg)},St};
+match_fail_cg({case_clause,Reason}, Le, Vdb, Bef, St) ->
+ R = cg_reg_arg(Reason, Bef),
+ Int0 = clear_dead(Bef, Le#l.i, Vdb),
+ {Sis,Int} = adjust_stack(Int0, Le#l.i, Le#l.i+1, Vdb),
+ {Sis++[{case_end,R}],
+ Int#sr{reg=clear_regs(Bef#sr.reg)},St};
+match_fail_cg(if_clause, Le, Vdb, Bef, St) ->
+ Int0 = clear_dead(Bef, Le#l.i, Vdb),
+ {Sis,Int1} = adjust_stack(Int0, Le#l.i, Le#l.i+1, Vdb),
+ {Sis++[if_end],Int1#sr{reg=clear_regs(Int1#sr.reg)},St};
+match_fail_cg({try_clause,Reason}, Le, Vdb, Bef, St) ->
+ R = cg_reg_arg(Reason, Bef),
+ Int0 = clear_dead(Bef, Le#l.i, Vdb),
+ {Sis,Int} = adjust_stack(Int0, Le#l.i, Le#l.i+1, Vdb),
+ {Sis ++ [{try_case_end,R}],
+ Int#sr{reg=clear_regs(Int0#sr.reg)},St}.
+
+
+%% block_cg([Kexpr], Le, Vdb, StackReg, St) -> {[Ainstr],StackReg,St}.
+%% block_cg([Kexpr], Le, StackReg, St) -> {[Ainstr],StackReg,St}.
+
+block_cg(Es, Le, _Vdb, Bef, St) ->
+ block_cg(Es, Le, Bef, St).
+
+block_cg(Es, Le, Bef, St0) ->
+ case St0#cg.is_top_block of
+ false ->
+ cg_block(Es, Le#l.i, Le#l.vdb, Bef, St0);
+ true ->
+ {Keis,Aft,St1} = cg_block(Es, Le#l.i, Le#l.vdb, Bef,
+ St0#cg{is_top_block=false,
+ need_frame=false}),
+ top_level_block(Keis, Aft, max_reg(Bef#sr.reg), St1)
+ end.
+
+cg_block([], _I, _Vdb, Bef, St0) ->
+ {[],Bef,St0};
+cg_block(Kes0, I, Vdb, Bef, St0) ->
+ {Kes2,Int1,St1} =
+ case basic_block(Kes0) of
+ {Kes1,LastI,Args,Rest} ->
+ Ke = hd(Kes1),
+ Fb = Ke#l.i,
+ cg_basic_block(Kes1, Fb, LastI, Args, Vdb, Bef, St0);
+ {Kes1,Rest} ->
+ cg_list(Kes1, I, Vdb, Bef, St0)
+ end,
+ {Kes3,Int2,St2} = cg_block(Rest, I, Vdb, Int1, St1),
+ {Kes2 ++ Kes3,Int2,St2}.
+
+basic_block(Kes) -> basic_block(Kes, []).
+
+basic_block([], Acc) -> {reverse(Acc),[]};
+basic_block([Le|Les], Acc) ->
+ case collect_block(Le#l.ke) of
+ include -> basic_block(Les, [Le|Acc]);
+ {block_end,As} -> {reverse(Acc, [Le]),Le#l.i,As,Les};
+ no_block -> {reverse(Acc, [Le]),Les}
+ end.
+
+collect_block({set,_,{binary,_}}) -> no_block;
+collect_block({set,_,_}) -> include;
+collect_block({call,{var,_}=Var,As,_Rs}) -> {block_end,As++[Var]};
+collect_block({call,Func,As,_Rs}) -> {block_end,As++func_vars(Func)};
+collect_block({enter,{var,_}=Var,As})-> {block_end,As++[Var]};
+collect_block({enter,Func,As}) -> {block_end,As++func_vars(Func)};
+collect_block({return,Rs}) -> {block_end,Rs};
+collect_block({break,Bs}) -> {block_end,Bs};
+collect_block({bif,_Bif,_As,_Rs}) -> include;
+collect_block(_) -> no_block.
+
+func_vars({remote,M,F}) when element(1, M) == var;
+ element(1, F) == var ->
+ [M,F];
+func_vars(_) -> [].
+
+%% cg_basic_block([Kexpr], FirstI, LastI, As, Vdb, StackReg, State) ->
+%% {[Ainstr],StackReg,State}.
+
+cg_basic_block(Kes, Fb, Lf, As, Vdb, Bef, St0) ->
+ Res = make_reservation(As, 0),
+ Regs0 = reserve(Res, Bef#sr.reg, Bef#sr.stk),
+ Stk = extend_stack(Bef, Lf, Lf+1, Vdb),
+ Int0 = Bef#sr{reg=Regs0,stk=Stk,res=Res},
+ X0_v0 = x0_vars(As, Fb, Lf, Vdb),
+ {Keis,{Aft,_,St1}} =
+ flatmapfoldl(fun(Ke, St) -> cg_basic_block(Ke, St, Lf, Vdb) end,
+ {Int0,X0_v0,St0}, need_heap(Kes, Fb)),
+ {Keis,Aft,St1}.
+
+cg_basic_block(Ke, {Inta,X0v,Sta}, _Lf, Vdb) when element(1, Ke#l.ke) =:= need_heap ->
+ {Keis,Intb,Stb} = cg(Ke, Vdb, Inta, Sta),
+ {comment(Inta) ++ Keis, {Intb,X0v,Stb}};
+cg_basic_block(Ke, {Inta,X0_v1,Sta}, Lf, Vdb) ->
+ {Sis,Intb} = save_carefully(Inta, Ke#l.i, Lf+1, Vdb),
+ {X0_v2,Intc} = allocate_x0(X0_v1, Ke#l.i, Intb),
+ Intd = reserve(Intc),
+ {Keis,Inte,Stb} = cg(Ke, Vdb, Intd, Sta),
+ {comment(Inta) ++ Sis ++ Keis, {Inte,X0_v2,Stb}}.
+
+make_reservation([], _) -> [];
+make_reservation([{var,V}|As], I) -> [{I,V}|make_reservation(As, I+1)];
+make_reservation([A|As], I) -> [{I,A}|make_reservation(As, I+1)].
+
+reserve(Sr) -> Sr#sr{reg=reserve(Sr#sr.res, Sr#sr.reg, Sr#sr.stk)}.
+
+reserve([{I,V}|Rs], [free|Regs], Stk) -> [{reserved,I,V}|reserve(Rs, Regs, Stk)];
+reserve([{I,V}|Rs], [{I,V}|Regs], Stk) -> [{I,V}|reserve(Rs, Regs, Stk)];
+reserve([{I,V}|Rs], [{I,Var}|Regs], Stk) ->
+ case on_stack(Var, Stk) of
+ true -> [{reserved,I,V}|reserve(Rs, Regs, Stk)];
+ false -> [{I,Var}|reserve(Rs, Regs, Stk)]
+ end;
+reserve([{I,V}|Rs], [{reserved,I,_}|Regs], Stk) ->
+ [{reserved,I,V}|reserve(Rs, Regs, Stk)];
+%reserve([{I,V}|Rs], [Other|Regs], Stk) -> [Other|reserve(Rs, Regs, Stk)];
+reserve([{I,V}|Rs], [], Stk) -> [{reserved,I,V}|reserve(Rs, [], Stk)];
+reserve([], Regs, _) -> Regs.
+
+extend_stack(Bef, Fb, Lf, Vdb) ->
+ Stk0 = clear_dead_stk(Bef#sr.stk, Fb, Vdb),
+ Saves = [V || {V,F,L} <- Vdb,
+ F < Fb,
+ L >= Lf,
+ not on_stack(V, Stk0)],
+ Stk1 = foldl(fun (V, Stk) -> put_stack(V, Stk) end, Stk0, Saves),
+ Bef#sr.stk ++ lists:duplicate(length(Stk1) - length(Bef#sr.stk), free).
+
+save_carefully(Bef, Fb, Lf, Vdb) ->
+ Stk = Bef#sr.stk,
+ %% New variables that are in use but not on stack.
+ New = [ {V,F,L} || {V,F,L} <- Vdb,
+ F < Fb,
+ L >= Lf,
+ not on_stack(V, Stk) ],
+ Saves = [ V || {V,_,_} <- keysort(2, New) ],
+ save_carefully(Saves, Bef, []).
+
+save_carefully([], Bef, Acc) -> {reverse(Acc),Bef};
+save_carefully([V|Vs], Bef, Acc) ->
+ case put_stack_carefully(V, Bef#sr.stk) of
+ error -> {reverse(Acc),Bef};
+ Stk1 ->
+ SrcReg = fetch_reg(V, Bef#sr.reg),
+ Move = {move,SrcReg,fetch_stack(V, Stk1)},
+ {x,_} = SrcReg, %Assertion - must be X register.
+ save_carefully(Vs, Bef#sr{stk=Stk1}, [Move|Acc])
+ end.
+
+x0_vars([], _Fb, _Lf, _Vdb) -> [];
+x0_vars([{var,V}|_], Fb, _Lf, Vdb) ->
+ {V,F,_L} = VFL = vdb_find(V, Vdb),
+ x0_vars1([VFL], Fb, F, Vdb);
+x0_vars([X0|_], Fb, Lf, Vdb) ->
+ x0_vars1([{X0,Lf,Lf}], Fb, Lf, Vdb).
+
+x0_vars1(X0, Fb, Xf, Vdb) ->
+ Vs0 = [VFL || {_V,F,L}=VFL <- Vdb,
+ F >= Fb,
+ L < Xf],
+ Vs1 = keysort(3, Vs0),
+ keysort(2, X0++Vs1).
+
+allocate_x0([], _, Bef) -> {[],Bef#sr{res=[]}};
+allocate_x0([{_,_,L}|Vs], I, Bef) when L =< I ->
+ allocate_x0(Vs, I, Bef);
+allocate_x0([{V,_F,_L}=VFL|Vs], _, Bef) ->
+ {[VFL|Vs],Bef#sr{res=reserve_x0(V, Bef#sr.res)}}.
+
+reserve_x0(V, [_|Res]) -> [{0,V}|Res];
+reserve_x0(V, []) -> [{0,V}].
+
+top_level_block(Keis, Bef, _MaxRegs, St0) when St0#cg.need_frame =:= false,
+ length(Bef#sr.stk) =:= 0 ->
+    %% This block needs no stack frame. However, we still need to turn the
+ %% stack frame upside down.
+ MaxY = length(Bef#sr.stk)-1,
+ Keis1 = flatmap(fun (Tuple) when tuple(Tuple) ->
+ [turn_yregs(size(Tuple), Tuple, MaxY)];
+ (Other) ->
+ [Other]
+ end, Keis),
+ {Keis1, Bef, St0#cg{is_top_block=true}};
+top_level_block(Keis, Bef, MaxRegs, St0) ->
+ %% This top block needs an allocate instruction before it, and a
+ %% deallocate instruction before each return.
+ FrameSz = length(Bef#sr.stk),
+ MaxY = FrameSz-1,
+ Keis1 = flatmap(fun ({call_only,Arity,Func}) ->
+ [{call_last,Arity,Func,FrameSz}];
+ ({call_ext_only,Arity,Func}) ->
+ [{call_ext_last,Arity,Func,FrameSz}];
+ ({apply_only,Arity}) ->
+ [{apply_last,Arity,FrameSz}];
+ (return) ->
+ [{deallocate,FrameSz}, return];
+ (Tuple) when tuple(Tuple) ->
+ [turn_yregs(size(Tuple), Tuple, MaxY)];
+ (Other) ->
+ [Other]
+ end, Keis),
+ {[{allocate_zero,FrameSz,MaxRegs}|Keis1], Bef, St0#cg{is_top_block=true}}.
+
+%% turn_yregs(Size, Tuple, MaxY) -> Tuple'
+%%  Renumber y registers so that {y, 0} becomes {y, FrameSize-1},
+%% {y, FrameSize-1} becomes {y, 0} and so on. This is to make nested
+%% catches work. The code generation algorithm gives a lower register
+%% number to the outer catch, which is wrong.
+
+turn_yregs(0, Tp, _) -> Tp;
+turn_yregs(El, Tp, MaxY) when element(1, element(El, Tp)) == yy ->
+ turn_yregs(El-1, setelement(El, Tp, {y,MaxY-element(2, element(El, Tp))}), MaxY);
+turn_yregs(El, Tp, MaxY) when list(element(El, Tp)) ->
+ New = map(fun ({yy,YY}) -> {y,MaxY-YY};
+ (Other) -> Other end, element(El, Tp)),
+ turn_yregs(El-1, setelement(El, Tp, New), MaxY);
+turn_yregs(El, Tp, MaxY) ->
+ turn_yregs(El-1, Tp, MaxY).
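+
+%% Illustrative note (added, not part of the original module): with
+%% FrameSz = 3 (MaxY = 2) an operand {yy,0} is rewritten to {y,2} and
+%% {yy,2} to {y,0}; x registers and other operands are left untouched.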
+
+%% select_cg(Sclause, V, TypeFail, ValueFail, StackReg, State) ->
+%% {Is,StackReg,State}.
+%%  Selecting type and value needs two failure labels: TypeFail is the
+%%  label of the next type test, to jump to when this type fails, and
+%%  ValueFail is the label to use when the type is correct but the value
+%%  is wrong. These are different because in the second case there is no
+%%  need to try the next type; it will always fail.
+
+select_cg(#l{ke={type_clause,cons,[S]}}, {var,V}, Tf, Vf, Bef, St) ->
+ select_cons(S, V, Tf, Vf, Bef, St);
+select_cg(#l{ke={type_clause,nil,[S]}}, {var,V}, Tf, Vf, Bef, St) ->
+ select_nil(S, V, Tf, Vf, Bef, St);
+select_cg(#l{ke={type_clause,binary,[S]}}, {var,V}, Tf, Vf, Bef, St) ->
+ select_binary(S, V, Tf, Vf, Bef, St);
+select_cg(#l{ke={type_clause,bin_seg,S}}, {var,V}, Tf, Vf, Bef, St) ->
+ select_bin_segs(S, V, Tf, Vf, Bef, St);
+select_cg(#l{ke={type_clause,bin_end,[S]}}, {var,V}, Tf, Vf, Bef, St) ->
+ select_bin_end(S, V, Tf, Vf, Bef, St);
+select_cg(#l{ke={type_clause,Type,Scs}}, {var,V}, Tf, Vf, Bef, St0) ->
+ {Vis,{Aft,St1}} =
+ mapfoldl(fun (S, {Int,Sta}) ->
+ {Val,Is,Inta,Stb} = select_val(S, V, Vf, Bef, Sta),
+ {{Is,[Val]},{sr_merge(Int, Inta),Stb}}
+ end, {void,St0}, Scs),
+ OptVls = combine(lists:sort(combine(Vis))),
+ {Vls,Sis,St2} = select_labels(OptVls, St1, [], []),
+ {select_val_cg(Type, fetch_var(V, Bef), Vls, Tf, Vf, Sis), Aft, St2}.
+
+select_val_cg(tuple, R, [Arity,{f,Lbl}], Tf, Vf, [{label,Lbl}|Sis]) ->
+ [{test,is_tuple,{f,Tf},[R]},{test,test_arity,{f,Vf},[R,Arity]}|Sis];
+select_val_cg(tuple, R, Vls, Tf, Vf, Sis) ->
+ [{test,is_tuple,{f,Tf},[R]},{select_tuple_arity,R,{f,Vf},{list,Vls}}|Sis];
+select_val_cg(Type, R, [Val, {f,Lbl}], Fail, Fail, [{label,Lbl}|Sis]) ->
+ [{test,is_eq_exact,{f,Fail},[R,{Type,Val}]}|Sis];
+select_val_cg(Type, R, [Val, {f,Lbl}], Tf, Vf, [{label,Lbl}|Sis]) ->
+ [{test,select_type_test(Type),{f,Tf},[R]},
+ {test,is_eq_exact,{f,Vf},[R,{Type,Val}]}|Sis];
+select_val_cg(Type, R, Vls0, Tf, Vf, Sis) ->
+ Vls1 = map(fun ({f,Lbl}) -> {f,Lbl};
+ (Value) -> {Type,Value}
+ end, Vls0),
+ [{test,select_type_test(Type),{f,Tf},[R]}, {select_val,R,{f,Vf},{list,Vls1}}|Sis].
+
+select_type_test(tuple) -> is_tuple;
+select_type_test(integer) -> is_integer;
+select_type_test(atom) -> is_atom;
+select_type_test(float) -> is_float.
+
+combine([{Is,Vs1}, {Is,Vs2}|Vis]) -> combine([{Is,Vs1 ++ Vs2}|Vis]);
+combine([V|Vis]) -> [V|combine(Vis)];
+combine([]) -> [].
+
+select_labels([{Is,Vs}|Vis], St0, Vls, Sis) ->
+ {Lbl,St1} = new_label(St0),
+ select_labels(Vis, St1, add_vls(Vs, Lbl, Vls), [[{label,Lbl}|Is]|Sis]);
+select_labels([], St, Vls, Sis) ->
+ {Vls,append(Sis),St}.
+
+add_vls([V|Vs], Lbl, Acc) ->
+ add_vls(Vs, Lbl, [V, {f,Lbl}|Acc]);
+add_vls([], _, Acc) -> Acc.
+
+select_cons(#l{ke={val_clause,{cons,Es},B},i=I,vdb=Vdb}, V, Tf, Vf, Bef, St0) ->
+ {Eis,Int,St1} = select_extract_cons(V, Es, I, Vdb, Bef, St0),
+ {Bis,Aft,St2} = match_cg(B, Vf, Int, St1),
+ {[{test,is_nonempty_list,{f,Tf},[fetch_var(V, Bef)]}] ++ Eis ++ Bis,Aft,St2}.
+
+select_nil(#l{ke={val_clause,nil,B}}, V, Tf, Vf, Bef, St0) ->
+ {Bis,Aft,St1} = match_cg(B, Vf, Bef, St0),
+ {[{test,is_nil,{f,Tf},[fetch_var(V, Bef)]}] ++ Bis,Aft,St1}.
+
+select_binary(#l{ke={val_clause,{old_binary,Var},B}}=L,
+ V, Tf, Vf, Bef, St) ->
+ %% Currently handled in the same way as new binaries.
+ select_binary(L#l{ke={val_clause,{binary,Var},B}}, V, Tf, Vf, Bef, St);
+select_binary(#l{ke={val_clause,{binary,{var,Ivar}},B},i=I,vdb=Vdb},
+ V, Tf, Vf, Bef, St0) ->
+ Int0 = clear_dead(Bef, I, Vdb),
+ {Bis,Aft,St1} = match_cg(B, Vf, Int0, St0),
+ {[{test,bs_start_match,{f,Tf},[fetch_var(V, Bef)]},{bs_save,Ivar}|Bis],
+ Aft,St1}.
+
+select_bin_segs(Scs, Ivar, Tf, _Vf, Bef, St) ->
+ match_fmf(fun(S, Fail, Sta) ->
+ select_bin_seg(S, Ivar, Fail, Bef, Sta) end,
+ Tf, St, Scs).
+
+select_bin_seg(#l{ke={val_clause,{bin_seg,Size,U,T,Fs,Es},B},i=I,vdb=Vdb},
+ Ivar, Fail, Bef, St0) ->
+ {Mis,Int,St1} = select_extract_bin(Es, Size, U, T, Fs, Fail,
+ I, Vdb, Bef, St0),
+ {Bis,Aft,St2} = match_cg(B, Fail, Int, St1),
+ {[{bs_restore,Ivar}|Mis] ++ Bis,Aft,St2}.
+
+select_extract_bin([{var,Hd},{var,Tl}], Size0, Unit, Type, Flags, Vf,
+ I, Vdb, Bef, St) ->
+ SizeReg = get_bin_size_reg(Size0, Bef),
+ {Es,Aft} =
+ case vdb_find(Hd, Vdb) of
+ {_,_,Lhd} when Lhd =< I ->
+ {[{test,bs_skip_bits,{f,Vf},[SizeReg,Unit,{field_flags,Flags}]},
+ {bs_save,Tl}],Bef};
+ {_,_,_} ->
+ Reg0 = put_reg(Hd, Bef#sr.reg),
+ Int1 = Bef#sr{reg=Reg0},
+ Rhd = fetch_reg(Hd, Reg0),
+ Name = get_bits_instr(Type),
+ {[{test,Name,{f,Vf},[SizeReg,Unit,{field_flags,Flags},Rhd]},
+ {bs_save,Tl}],Int1}
+ end,
+ {Es,clear_dead(Aft, I, Vdb),St}.
+
+get_bin_size_reg({var,V}, Bef) ->
+ fetch_var(V, Bef);
+get_bin_size_reg(Literal, _Bef) ->
+ Literal.
+
+select_bin_end(#l{ke={val_clause,bin_end,B}},
+ Ivar, Tf, Vf, Bef, St0) ->
+ {Bis,Aft,St2} = match_cg(B, Vf, Bef, St0),
+ {[{bs_restore,Ivar},{test,bs_test_tail,{f,Tf},[0]}|Bis],Aft,St2}.
+
+get_bits_instr(integer) -> bs_get_integer;
+get_bits_instr(float) -> bs_get_float;
+get_bits_instr(binary) -> bs_get_binary.
+
+select_val(#l{ke={val_clause,{tuple,Es},B},i=I,vdb=Vdb}, V, Vf, Bef, St0) ->
+ {Eis,Int,St1} = select_extract_tuple(V, Es, I, Vdb, Bef, St0),
+ {Bis,Aft,St2} = match_cg(B, Vf, Int, St1),
+ {length(Es),Eis ++ Bis,Aft,St2};
+select_val(#l{ke={val_clause,{_,Val},B}}, _V, Vf, Bef, St0) ->
+ {Bis,Aft,St1} = match_cg(B, Vf, Bef, St0),
+ {Val,Bis,Aft,St1}.
+
+%% select_extract_tuple(Src, [V], I, Vdb, StackReg, State) ->
+%% {[E],StackReg,State}.
+%% Extract tuple elements, but only if they do not immediately die.
+
+select_extract_tuple(Src, Vs, I, Vdb, Bef, St) ->
+ F = fun ({var,V}, {Int0,Elem}) ->
+ case vdb_find(V, Vdb) of
+ {V,_,L} when L =< I -> {[], {Int0,Elem+1}};
+ _Other ->
+ Reg1 = put_reg(V, Int0#sr.reg),
+ Int1 = Int0#sr{reg=Reg1},
+ Rsrc = fetch_var(Src, Int1),
+ {[{get_tuple_element,Rsrc,Elem,fetch_reg(V, Reg1)}],
+ {Int1,Elem+1}}
+ end
+ end,
+ {Es,{Aft,_}} = flatmapfoldl(F, {Bef,0}, Vs),
+ {Es,Aft,St}.
+
+select_extract_cons(Src, [{var,Hd}, {var,Tl}], I, Vdb, Bef, St) ->
+ {Es,Aft} = case {vdb_find(Hd, Vdb), vdb_find(Tl, Vdb)} of
+ {{_,_,Lhd}, {_,_,Ltl}} when Lhd =< I, Ltl =< I ->
+ %% Both head and tail are dead. No need to generate
+ %% any instruction.
+ {[], Bef};
+ _ ->
+ %% At least one of head and tail will be used,
+ %% but we must always fetch both. We will call
+ %% clear_dead/2 to allow reuse of the register
+		       %% in case only one of them is used.
+
+ Reg0 = put_reg(Tl, put_reg(Hd, Bef#sr.reg)),
+ Int0 = Bef#sr{reg=Reg0},
+ Rsrc = fetch_var(Src, Int0),
+ Rhd = fetch_reg(Hd, Reg0),
+ Rtl = fetch_reg(Tl, Reg0),
+ Int1 = clear_dead(Int0, I, Vdb),
+ {[{get_list,Rsrc,Rhd,Rtl}], Int1}
+ end,
+ {Es,Aft,St}.
+
+
+guard_clause_cg(#l{ke={guard_clause,G,B},vdb=Vdb}, Fail, Bef, St0) ->
+ {Gis,Int,St1} = guard_cg(G, Fail, Vdb, Bef, St0),
+ {Bis,Aft,St2} = match_cg(B, Fail, Int, St1),
+ {Gis ++ Bis,Aft,St2}.
+
+%% guard_cg(Guard, Fail, Vdb, StackReg, State) ->
+%% {[Ainstr],StackReg,State}.
+%% A guard is a boolean expression of tests. Tests return true or
+%% false. A fault in a test causes the test to return false. Tests
+%%  never return the boolean; instead we generate jump code to go to
+%% the correct exit point. Primops and tests all go to the next
+%% instruction on success or jump to a failure label.
+
+guard_cg(#l{ke={protected,Ts,Rs},i=I,vdb=Pdb}, Fail, _Vdb, Bef, St) ->
+ protected_cg(Ts, Rs, Fail, I, Pdb, Bef, St);
+guard_cg(#l{ke={block,Ts},i=I,vdb=Bdb}, Fail, _Vdb, Bef, St) ->
+ guard_cg_list(Ts, Fail, I, Bdb, Bef, St);
+guard_cg(#l{ke={test,Test,As},i=I,vdb=_Tdb}, Fail, Vdb, Bef, St) ->
+ test_cg(Test, As, Fail, I, Vdb, Bef, St);
+guard_cg(G, _Fail, Vdb, Bef, St) ->
+ %%ok = io:fwrite("cg ~w: ~p~n", [?LINE,{G,Fail,Vdb,Bef}]),
+ {Gis,Aft,St1} = cg(G, Vdb, Bef, St),
+ %%ok = io:fwrite("cg ~w: ~p~n", [?LINE,{Aft}]),
+ {Gis,Aft,St1}.
+
+%% protected_cg([Kexpr], [Ret], Fail, I, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+%%  Do a protected. Protecteds without return values are just done for
+%%  effect: the return value is not checked, success passes on to the
+%%  next instruction and failure jumps to Fail. If there are return
+%%  values then these must be set to 'false' on failure, and control
+%%  always passes to the next instruction.
+
+protected_cg(Ts, [], Fail, I, Vdb, Bef, St0) ->
+ %% Protect these calls, revert when done.
+ {Tis,Aft,St1} = guard_cg_list(Ts, Fail, I, Vdb, Bef,
+ St0#cg{btype=fail,bfail=Fail}),
+ {Tis,Aft,St1#cg{btype=St0#cg.btype,bfail=St0#cg.bfail}};
+protected_cg(Ts, Rs, _Fail, I, Vdb, Bef, St0) ->
+ {Pfail,St1} = new_label(St0),
+ {Psucc,St2} = new_label(St1),
+ {Tis,Aft,St3} = guard_cg_list(Ts, Pfail, I, Vdb, Bef,
+ St2#cg{btype=fail,bfail=Pfail}),
+ %%ok = io:fwrite("cg ~w: ~p~n", [?LINE,{Rs,I,Vdb,Aft}]),
+ %% Set return values to false.
+ Mis = map(fun ({var,V}) -> {move,{atom,false},fetch_var(V, Aft)} end, Rs),
+ Live = {'%live',max_reg(Aft#sr.reg)},
+ {Tis ++ [Live,{jump,{f,Psucc}},
+ {label,Pfail}] ++ Mis ++ [Live,{label,Psucc}],
+ Aft,St3#cg{btype=St0#cg.btype,bfail=St0#cg.bfail}}.
+
+%% test_cg(TestName, Args, Fail, I, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+%% Generate test instruction. Use explicit fail label here.
+
+test_cg(Test, As, Fail, I, Vdb, Bef, St) ->
+ case test_type(Test, length(As)) of
+ {cond_op,Op} ->
+ Ars = cg_reg_args(As, Bef),
+ Int = clear_dead(Bef, I, Vdb),
+ {[{test,Op,{f,Fail},Ars}],
+ clear_dead(Int, I, Vdb),
+ St};
+ {rev_cond_op,Op} ->
+ [S1,S2] = cg_reg_args(As, Bef),
+ Int = clear_dead(Bef, I, Vdb),
+ {[{test,Op,{f,Fail},[S2,S1]}],
+ clear_dead(Int, I, Vdb),
+ St}
+ end.
+
+test_type(is_atom, 1) -> {cond_op,is_atom};
+test_type(is_boolean, 1) -> {cond_op,is_boolean};
+test_type(is_binary, 1) -> {cond_op,is_binary};
+test_type(is_constant, 1) -> {cond_op,is_constant};
+test_type(is_float, 1) -> {cond_op,is_float};
+test_type(is_function, 1) -> {cond_op,is_function};
+test_type(is_integer, 1) -> {cond_op,is_integer};
+test_type(is_list, 1) -> {cond_op,is_list};
+test_type(is_number, 1) -> {cond_op,is_number};
+test_type(is_pid, 1) -> {cond_op,is_pid};
+test_type(is_port, 1) -> {cond_op,is_port};
+test_type(is_reference, 1) -> {cond_op,is_reference};
+test_type(is_tuple, 1) -> {cond_op,is_tuple};
+test_type('=<', 2) -> {rev_cond_op,is_ge};
+test_type('>', 2) -> {rev_cond_op,is_lt};
+test_type('<', 2) -> {cond_op,is_lt};
+test_type('>=', 2) -> {cond_op,is_ge};
+test_type('==', 2) -> {cond_op,is_eq};
+test_type('/=', 2) -> {cond_op,is_ne};
+test_type('=:=', 2) -> {cond_op,is_eq_exact};
+test_type('=/=', 2) -> {cond_op,is_ne_exact};
+test_type(internal_is_record, 3) -> {cond_op,internal_is_record}.
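+
+%% Illustrative note (added, not part of the original module): '=<' has no
+%% BEAM test of its own, so X =< Y is compiled with swapped operands as
+%% {test,is_ge,{f,Fail},[Y,X]} in test_cg/7; '>' is handled the same way
+%% via is_lt.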
+
+%% guard_cg_list([Kexpr], Fail, I, Vdb, StackReg, St) ->
+%% {[Ainstr],StackReg,St}.
+
+guard_cg_list(Kes, Fail, I, Vdb, Bef, St0) ->
+ {Keis,{Aft,St1}} =
+ flatmapfoldl(fun (Ke, {Inta,Sta}) ->
+ {Keis,Intb,Stb} =
+ guard_cg(Ke, Fail, Vdb, Inta, Sta),
+ {comment(Inta) ++ Keis,{Intb,Stb}}
+ end, {Bef,St0}, need_heap(Kes, I)),
+ {Keis,Aft,St1}.
+
+%% match_fmf(Fun, LastFail, State, [Clause]) -> {Is,Aft,State}.
+%% This is a special flatmapfoldl for match code gen where we
+%% generate a "failure" label for each clause. The last clause uses
+%% an externally generated failure label, LastFail. N.B. We do not
+%% know or care how the failure labels are used.
+
+match_fmf(F, LastFail, St, [H]) ->
+ F(H, LastFail, St);
+match_fmf(F, LastFail, St0, [H|T]) ->
+ {Fail,St1} = new_label(St0),
+ {R,Aft1,St2} = F(H, Fail, St1),
+ {Rs,Aft2,St3} = match_fmf(F, LastFail, St2, T),
+ {R ++ [{label,Fail}] ++ Rs,sr_merge(Aft1, Aft2),St3};
+match_fmf(_, _, St, []) -> {[],void,St}.
+
+%% call_cg(Func, [Arg], [Ret], Le, Vdb, StackReg, State) ->
+%% {[Ainstr],StackReg,State}.
+%% enter_cg(Func, [Arg], Le, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+%% Call and enter first put the arguments into registers and save any
+%% other registers, then clean up and compress the stack and set the
+%% frame size. Finally the actual call is made. Call then needs the
+%% return values filled in.
+
+call_cg({var,V}, As, Rs, Le, Vdb, Bef, St0) ->
+ {Sis,Int} = cg_setup_call(As++[{var,V}], Bef, Le#l.i, Vdb),
+ %% Put return values in registers.
+ Reg = load_vars(Rs, clear_regs(Int#sr.reg)),
+ %% Build complete code and final stack/register state.
+ Arity = length(As),
+ {Frees,Aft} = free_dead(clear_dead(Int#sr{reg=Reg}, Le#l.i, Vdb)),
+ {comment({call_fun,{var,V},As}) ++ Sis ++ Frees ++ [{call_fun,Arity}],
+ Aft,need_stack_frame(St0)};
+call_cg({remote,Mod,Name}, As, Rs, Le, Vdb, Bef, St0)
+ when element(1, Mod) == var;
+ element(1, Name) == var ->
+ {Sis,Int} = cg_setup_call(As++[Mod,Name], Bef, Le#l.i, Vdb),
+ %% Put return values in registers.
+ Reg = load_vars(Rs, clear_regs(Int#sr.reg)),
+ %% Build complete code and final stack/register state.
+ Arity = length(As),
+ Call = {apply,Arity},
+ St = need_stack_frame(St0),
+ %%{Call,St1} = build_call(Func, Arity, St0),
+ {Frees,Aft} = free_dead(clear_dead(Int#sr{reg=Reg}, Le#l.i, Vdb)),
+ {Sis ++ Frees ++ [Call],Aft,St};
+call_cg(Func, As, Rs, Le, Vdb, Bef, St0) ->
+ {Sis,Int} = cg_setup_call(As, Bef, Le#l.i, Vdb),
+ %% Put return values in registers.
+ Reg = load_vars(Rs, clear_regs(Int#sr.reg)),
+ %% Build complete code and final stack/register state.
+ Arity = length(As),
+ {Call,St1} = build_call(Func, Arity, St0),
+ {Frees,Aft} = free_dead(clear_dead(Int#sr{reg=Reg}, Le#l.i, Vdb)),
+ {comment({call,Func,As}) ++ Sis ++ Frees ++ Call,Aft,St1}.
+
+build_call({remote,{atom,erlang},{atom,'!'}}, 2, St0) ->
+ {[send],need_stack_frame(St0)};
+build_call({remote,{atom,Mod},{atom,Name}}, Arity, St0) ->
+ {[{call_ext,Arity,{extfunc,Mod,Name,Arity}}],need_stack_frame(St0)};
+build_call(Name, Arity, St0) when atom(Name) ->
+ {Lbl,St1} = local_func_label(Name, Arity, need_stack_frame(St0)),
+ {[{call,Arity,{f,Lbl}}],St1}.
+
+free_dead(#sr{stk=Stk0}=Aft) ->
+ {Instr,Stk} = free_dead(Stk0, 0, [], []),
+ {Instr,Aft#sr{stk=Stk}}.
+
+free_dead([dead|Stk], Y, Instr, StkAcc) ->
+ %% Note: kill/1 is equivalent to init/1 (translated by beam_asm).
+ %% We use kill/1 to help further optimisation passes.
+ free_dead(Stk, Y+1, [{kill,{yy,Y}}|Instr], [free|StkAcc]);
+free_dead([Any|Stk], Y, Instr, StkAcc) ->
+ free_dead(Stk, Y+1, Instr, [Any|StkAcc]);
+free_dead([], _, Instr, StkAcc) -> {Instr,reverse(StkAcc)}.
+
+enter_cg({var,V}, As, Le, Vdb, Bef, St0) ->
+ {Sis,Int} = cg_setup_call(As++[{var,V}], Bef, Le#l.i, Vdb),
+ %% Build complete code and final stack/register state.
+ Arity = length(As),
+ {comment({call_fun,{var,V},As}) ++ Sis ++ [{call_fun,Arity},return],
+ clear_dead(Int#sr{reg=clear_regs(Int#sr.reg)}, Le#l.i, Vdb),
+ need_stack_frame(St0)};
+enter_cg({remote,Mod,Name}=Func, As, Le, Vdb, Bef, St0)
+ when element(1, Mod) == var;
+ element(1, Name) == var ->
+ {Sis,Int} = cg_setup_call(As++[Mod,Name], Bef, Le#l.i, Vdb),
+ %% Build complete code and final stack/register state.
+ Arity = length(As),
+ Call = {apply_only,Arity},
+ St = need_stack_frame(St0),
+ {comment({enter,Func,As}) ++ Sis ++ [Call],
+ clear_dead(Int#sr{reg=clear_regs(Int#sr.reg)}, Le#l.i, Vdb),
+ St};
+enter_cg(Func, As, Le, Vdb, Bef, St0) ->
+ {Sis,Int} = cg_setup_call(As, Bef, Le#l.i, Vdb),
+ %% Build complete code and final stack/register state.
+ Arity = length(As),
+ {Call,St1} = build_enter(Func, Arity, St0),
+ {comment({enter,Func,As}) ++ Sis ++ Call,
+ clear_dead(Int#sr{reg=clear_regs(Int#sr.reg)}, Le#l.i, Vdb),
+ St1}.
+
+build_enter({remote,{atom,erlang},{atom,'!'}}, 2, St0) ->
+ {[send,return],need_stack_frame(St0)};
+build_enter({remote,{atom,Mod},{atom,Name}}, Arity, St0) ->
+ St1 = case trap_bif(Mod, Name, Arity) of
+ true -> need_stack_frame(St0);
+ false -> St0
+ end,
+ {[{call_ext_only,Arity,{extfunc,Mod,Name,Arity}}],St1};
+build_enter(Name, Arity, St0) when is_atom(Name) ->
+ {Lbl,St1} = local_func_label(Name, Arity, St0),
+ {[{call_only,Arity,{f,Lbl}}],St1}.
+
+%% local_func_label(Name, Arity, State) -> {Label,State'}
+%% Get the function entry label for a local function.
+
+local_func_label(Name, Arity, St0) ->
+ Key = {Name,Arity},
+ case keysearch(Key, 1, St0#cg.functable) of
+ {value,{Key,Label}} ->
+ {Label,St0};
+ false ->
+ {Label,St1} = new_label(St0),
+ {Label,St1#cg{functable=[{Key,Label}|St1#cg.functable]}}
+ end.
+
+%% need_stack_frame(State) -> State'
+%% Make a note in the state that this function will need a stack frame.
+
+need_stack_frame(#cg{need_frame=true}=St) -> St;
+need_stack_frame(St) -> St#cg{need_frame=true}.
+
+%% trap_bif(Mod, Name, Arity) -> true|false
+%% Trap bifs that need a stack frame.
+
+trap_bif(erlang, '!', 2) -> true;
+trap_bif(erlang, link, 1) -> true;
+trap_bif(erlang, unlink, 1) -> true;
+trap_bif(erlang, monitor_node, 2) -> true;
+trap_bif(erlang, group_leader, 2) -> true;
+trap_bif(erlang, exit, 2) -> true;
+trap_bif(_, _, _) -> false.
+
+%% bif_cg(Bif, [Arg], [Ret], Le, Vdb, StackReg, State) ->
+%% {[Ainstr],StackReg,State}.
+
+bif_cg(dsetelement, [Index0,Tuple0,New0], _Rs, Le, Vdb, Bef, St0) ->
+ [New,Tuple,{integer,Index1}] = cg_reg_args([New0,Tuple0,Index0], Bef),
+ Index = Index1-1,
+ {[{set_tuple_element,New,Tuple,Index}],
+ clear_dead(Bef, Le#l.i, Vdb), St0};
+bif_cg({make_fun,Func,Arity,Index,Uniq}, As, Rs, Le, Vdb, Bef, St0) ->
+ %% This behaves more like a function call.
+ {Sis,Int} = cg_setup_call(As, Bef, Le#l.i, Vdb),
+ Reg = load_vars(Rs, clear_regs(Int#sr.reg)),
+ {FuncLbl,St1} = local_func_label(Func, Arity, St0),
+ MakeFun = case St0#cg.new_funs of
+ true -> {make_fun2,{f,FuncLbl},Index,Uniq,length(As)};
+ false -> {make_fun,{f,FuncLbl},Uniq,length(As)}
+ end,
+ {comment({make_fun,{Func,Arity,Uniq},As}) ++ Sis ++
+ [MakeFun],
+ clear_dead(Int#sr{reg=Reg}, Le#l.i, Vdb),
+ St1};
+bif_cg(Bif, As, [{var,V}], Le, Vdb, Bef, St0) ->
+ Ars = cg_reg_args(As, Bef),
+
+ %% If we are inside a catch, we must save everything that will
+ %% be alive after the catch (because the BIF might fail and there
+ %% will be a jump to the code after the catch).
+ %% Currently, we are somewhat pessimistic in
+ %% that we save any variable that will be live after this BIF call.
+
+ {Sis,Int0} =
+ case St0#cg.in_catch of
+ true -> adjust_stack(Bef, Le#l.i, Le#l.i+1, Vdb);
+ false -> {[],Bef}
+ end,
+
+ Int1 = clear_dead(Int0, Le#l.i, Vdb),
+ Reg = put_reg(V, Int1#sr.reg),
+ Int = Int1#sr{reg=Reg},
+ Dst = fetch_reg(V, Reg),
+ {Sis ++ [{bif,Bif,bif_fail(St0#cg.btype, St0#cg.bfail, length(Ars)),Ars,Dst}],
+ clear_dead(Int, Le#l.i, Vdb), St0}.
+
+bif_fail(_, _, 0) -> nofail;
+bif_fail(exit, _, _) -> {f,0};
+bif_fail(fail, Fail, _) -> {f,Fail}.
+
+%% recv_loop_cg(TimeOut, ReceiveVar, ReceiveMatch, TimeOutExprs,
+%% [Ret], Le, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+
+recv_loop_cg(Te, Rvar, Rm, Tes, Rs, Le, Vdb, Bef, St0) ->
+ {Sis,Int0} = adjust_stack(Bef, Le#l.i, Le#l.i, Vdb),
+ Int1 = Int0#sr{reg=clear_regs(Int0#sr.reg)},
+ %% Get labels.
+ {Rl,St1} = new_label(St0),
+ {Tl,St2} = new_label(St1),
+ {Bl,St3} = new_label(St2),
+ St4 = St3#cg{break=Bl,recv=Rl}, %Set correct receive labels
+ {Ris,Raft,St5} = cg_recv_mesg(Rvar, Rm, Tl, Int1, St4),
+ {Wis,Taft,St6} = cg_recv_wait(Te, Tes, Le#l.i, Int1, St5),
+ Int2 = sr_merge(Raft, Taft), %Merge stack/registers
+ Reg = load_vars(Rs, Int2#sr.reg),
+ {Sis ++ Ris ++ [{label,Tl}] ++ Wis ++ [{label,Bl}],
+ clear_dead(Int2#sr{reg=Reg}, Le#l.i, Vdb),
+ St6#cg{break=St0#cg.break,recv=St0#cg.recv}}.
+
+%% cg_recv_mesg( ) -> {[Ainstr],Aft,St}.
+
+cg_recv_mesg({var,R}, Rm, Tl, Bef, St0) ->
+ Int0 = Bef#sr{reg=put_reg(R, Bef#sr.reg)},
+ Ret = fetch_reg(R, Int0#sr.reg),
+ %% Int1 = clear_dead(Int0, I, Rm#l.vdb),
+ Int1 = Int0,
+ {Mis,Int2,St1} = match_cg(Rm, none, Int1, St0),
+ {[{'%live',0},{label,St1#cg.recv},{loop_rec,{f,Tl},Ret}|Mis],Int2,St1}.
+
+%% cg_recv_wait(Te, Tes, I, Bef, St) -> {[Ainstr],Aft,St}.
+
+cg_recv_wait({atom,infinity}, Tes, I, Bef, St0) ->
+ %% We know that the 'after' body will never be executed.
+ %% But to keep the stack and register information up to date,
+ %% we will generate the code for the 'after' body, and then discard it.
+ Int1 = clear_dead(Bef, I, Tes#l.vdb),
+ {_,Int2,St1} = cg_block(Tes#l.ke, Tes#l.i, Tes#l.vdb,
+ Int1#sr{reg=clear_regs(Int1#sr.reg)}, St0),
+ {[{wait,{f,St1#cg.recv}}],Int2,St1};
+cg_recv_wait({integer,0}, Tes, _I, Bef, St0) ->
+ {Tis,Int,St1} = cg_block(Tes#l.ke, Tes#l.i, Tes#l.vdb, Bef, St0),
+ {[timeout|Tis],Int,St1};
+cg_recv_wait(Te, Tes, I, Bef, St0) ->
+ Reg = cg_reg_arg(Te, Bef),
+ %% Must have empty registers here! Bug if anything in registers.
+ Int0 = clear_dead(Bef, I, Tes#l.vdb),
+ {Tis,Int,St1} = cg_block(Tes#l.ke, Tes#l.i, Tes#l.vdb,
+ Int0#sr{reg=clear_regs(Int0#sr.reg)}, St0),
+ {[{wait_timeout,{f,St1#cg.recv},Reg},timeout] ++ Tis,Int,St1}.
+
+%% recv_next_cg(Le, Vdb, StackReg, St) -> {[Ainstr],StackReg,St}.
+%%  Use adjust_stack/4 to clear the stack, but we only need it for Aft.
+
+recv_next_cg(Le, Vdb, Bef, St) ->
+ {Sis,Aft} = adjust_stack(Bef, Le#l.i, Le#l.i+1, Vdb),
+ {[{loop_rec_end,{f,St#cg.recv}}] ++ Sis,Aft,St}. %Joke
+
+%% try_cg(TryBlock, [BodyVar], TryBody, [ExcpVar], TryHandler, [Ret],
+%% Le, Vdb, StackReg, St) -> {[Ainstr],StackReg,St}.
+
+try_cg(Ta, Vs, Tb, Evs, Th, Rs, Le, Vdb, Bef, St0) ->
+ {B,St1} = new_label(St0), %Body label
+ {H,St2} = new_label(St1), %Handler label
+ {E,St3} = new_label(St2), %End label
+ TryTag = Ta#l.i,
+ Int1 = Bef#sr{stk=put_catch(TryTag, Bef#sr.stk)},
+ TryReg = fetch_stack({catch_tag,TryTag}, Int1#sr.stk),
+ {Ais,Int2,St4} = cg(Ta, Vdb, Int1, St3#cg{break=B,in_catch=true}),
+ Int3 = Int2#sr{stk=drop_catch(TryTag, Int2#sr.stk)},
+ St5 = St4#cg{break=E,in_catch=St3#cg.in_catch},
+ {Bis,Baft,St6} = cg(Tb, Vdb, Int3#sr{reg=load_vars(Vs, Int3#sr.reg)}, St5),
+ {His,Haft,St7} = cg(Th, Vdb, Int3#sr{reg=load_vars(Evs, Int3#sr.reg)}, St6),
+ Int4 = sr_merge(Baft, Haft), %Merge stack/registers
+ Aft = Int4#sr{reg=load_vars(Rs, Int4#sr.reg)},
+ {[{'try',TryReg,{f,H}}] ++ Ais ++
+ [{label,B},{try_end,TryReg}] ++ Bis ++
+ [{label,H},{try_case,TryReg}] ++ His ++
+ [{label,E}],
+ clear_dead(Aft, Le#l.i, Vdb),
+ St7#cg{break=St0#cg.break}}.
+
+%% catch_cg(CatchBlock, Ret, Le, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+
+catch_cg(C, {var,R}, Le, Vdb, Bef, St0) ->
+ {B,St1} = new_label(St0),
+ CatchTag = Le#l.i,
+ Int1 = Bef#sr{stk=put_catch(CatchTag, Bef#sr.stk)},
+ CatchReg = fetch_stack({catch_tag,CatchTag}, Int1#sr.stk),
+ {Cis,Int2,St2} = cg_block(C, Le#l.i, Le#l.vdb, Int1,
+ St1#cg{break=B,in_catch=true}),
+ Aft = Int2#sr{reg=load_reg(R, 0, Int2#sr.reg),
+ stk=drop_catch(CatchTag, Int2#sr.stk)},
+ {[{'catch',CatchReg,{f,B}}] ++ Cis ++
+ [{label,B},{catch_end,CatchReg}],
+ clear_dead(Aft, Le#l.i, Vdb),
+ St2#cg{break=St1#cg.break,in_catch=St1#cg.in_catch}}.
+
+%% set_cg([Var], Constr, Le, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+%% We have to be careful how a 'set' works. First the structure is
+%% built, then it is filled and finally things can be cleared. The
+%% annotation must reflect this and make sure that the return
+%% variable is allocated first.
+%%
+%% put_list for constructing a cons is an atomic instruction
+%%  which can safely reuse one of the source registers as target.
+%% Also binaries can reuse a source register as target.
+
+set_cg([{var,R}], {cons,Es}, Le, Vdb, Bef, St) ->
+ [S1,S2] = map(fun ({var,V}) -> fetch_var(V, Bef);
+ (Other) -> Other
+ end, Es),
+ Int0 = clear_dead(Bef, Le#l.i, Vdb),
+ Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)},
+ Ret = fetch_reg(R, Int1#sr.reg),
+ {[{put_list,S1,S2,Ret}], Int1, St};
+set_cg([{var,R}], {old_binary,Segs}, Le, Vdb, Bef, St) ->
+ Fail = bif_fail(St#cg.btype, St#cg.bfail, 42),
+ PutCode = cg_bin_put(Segs, Fail, Bef),
+ Code = cg_binary_old(PutCode),
+ Int0 = clear_dead(Bef, Le#l.i, Vdb),
+ Aft = Int0#sr{reg=put_reg(R, Int0#sr.reg)},
+ Ret = fetch_reg(R, Aft#sr.reg),
+ {Code ++ [{bs_final,Fail,Ret}],Aft,St};
+set_cg([{var,R}], {binary,Segs}, Le, Vdb, Bef, #cg{in_catch=InCatch}=St) ->
+ Int0 = Bef#sr{reg=put_reg(R, Bef#sr.reg)},
+ Target = fetch_reg(R, Int0#sr.reg),
+ Fail = bif_fail(St#cg.btype, St#cg.bfail, 42),
+ Temp = find_scratch_reg(Int0#sr.reg),
+ PutCode = cg_bin_put(Segs, Fail, Bef),
+ {Sis,Int1} =
+ case InCatch of
+ true -> adjust_stack(Int0, Le#l.i, Le#l.i+1, Vdb);
+ false -> {[],Int0}
+ end,
+ Aft = clear_dead(Int1, Le#l.i, Vdb),
+ Code = cg_binary(PutCode, Target, Temp, Fail, Aft),
+ {Sis++Code,Aft,St};
+set_cg([{var,R}], Con, Le, Vdb, Bef, St) ->
+ %% Find a place for the return register first.
+ Int = Bef#sr{reg=put_reg(R, Bef#sr.reg)},
+ Ret = fetch_reg(R, Int#sr.reg),
+ Ais = case Con of
+ {tuple,Es} ->
+ [{put_tuple,length(Es),Ret}] ++ cg_build_args(Es, Bef);
+ {var,V} -> % Normally removed by kernel optimizer.
+ [{move,fetch_var(V, Int),Ret}];
+ {string,Str} ->
+ [{put_string,length(Str),{string,Str},Ret}];
+ Other ->
+ [{move,Other,Ret}]
+ end,
+ {Ais,clear_dead(Int, Le#l.i, Vdb),St};
+set_cg([], {binary,Segs}, Le, Vdb, Bef, St) ->
+ Fail = bif_fail(St#cg.btype, St#cg.bfail, 42),
+ Target = find_scratch_reg(Bef#sr.reg),
+ Temp = find_scratch_reg(put_reg(Target, Bef#sr.reg)),
+ PutCode = cg_bin_put(Segs, Fail, Bef),
+ Code = cg_binary(PutCode, Target, Temp, Fail, Bef),
+ Aft = clear_dead(Bef, Le#l.i, Vdb),
+ {Code,Aft,St};
+set_cg([], {old_binary,Segs}, Le, Vdb, Bef, St) ->
+ Fail = bif_fail(St#cg.btype, St#cg.bfail, 42),
+ PutCode = cg_bin_put(Segs, Fail, Bef),
+ Ais0 = cg_binary_old(PutCode),
+ Ret = find_scratch_reg(Bef#sr.reg),
+ Ais = Ais0 ++ [{bs_final,Fail,Ret}],
+ {Ais,clear_dead(Bef, Le#l.i, Vdb),St};
+set_cg([], _, Le, Vdb, Bef, St) ->
+ %% This should have been stripped by compiler, just cleanup.
+ {[],clear_dead(Bef, Le#l.i, Vdb), St}.
+
+
+%%%
+%%% Code generation for constructing binaries.
+%%%
+
+cg_binary(PutCode, Target, Temp, Fail, Bef) ->
+ SzCode = cg_binary_size(PutCode, Target, Temp, Fail),
+ MaxRegs = max_reg(Bef#sr.reg),
+ Code = SzCode ++ [{bs_init2,Fail,Target,MaxRegs,{field_flags,[]},Target}|PutCode],
+ cg_bin_opt(Code).
+
+cg_binary_size(PutCode, Target, Temp, Fail) ->
+ Szs = cg_binary_size_1(PutCode, 0, []),
+ cg_binary_size_expr(Szs, Target, Temp, Fail).
+
+cg_binary_size_1([{_Put,_Fail,S,U,_Flags,Src}|T], Bits, Acc) ->
+ cg_binary_size_2(S, U, Src, T, Bits, Acc);
+cg_binary_size_1([], Bits, Acc) ->
+ Bytes = Bits div 8,
+ RemBits = Bits rem 8,
+ Res = sort([{1,{integer,RemBits}},{8,{integer,Bytes}}|Acc]),
+ cg_binary_size_3(Res).
+
+cg_binary_size_2({integer,N}, U, _, Next, Bits, Acc) ->
+ cg_binary_size_1(Next, Bits+N*U, Acc);
+cg_binary_size_2({atom,all}, 8, E, Next, Bits, Acc) ->
+ cg_binary_size_1(Next, Bits, [{8,{size,E}}|Acc]);
+cg_binary_size_2(Reg, 1, _, Next, Bits, Acc) ->
+ cg_binary_size_1(Next, Bits, [{1,Reg}|Acc]);
+cg_binary_size_2(Reg, 8, _, Next, Bits, Acc) ->
+ cg_binary_size_1(Next, Bits, [{8,Reg}|Acc]);
+cg_binary_size_2(Reg, U, _, Next, Bits, Acc) ->
+ cg_binary_size_1(Next, Bits, [{1,{'*',Reg,U}}|Acc]).
+
+cg_binary_size_3([{_,{integer,0}}|T]) ->
+ cg_binary_size_3(T);
+cg_binary_size_3([{U,S1},{U,S2}|T]) ->
+ {L0,Rest} = cg_binary_size_4(T, U, []),
+ L = [S1,S2|L0],
+ [{U,L}|cg_binary_size_3(Rest)];
+cg_binary_size_3([{U,S}|T]) ->
+ [{U,[S]}|cg_binary_size_3(T)];
+cg_binary_size_3([]) -> [].
+
+cg_binary_size_4([{U,S}|T], U, Acc) ->
+ cg_binary_size_4(T, U, [S|Acc]);
+cg_binary_size_4(T, _, Acc) ->
+ {Acc,T}.
+
+%% cg_binary_size_expr/4
+%% Generate code for calculating the resulting size of a binary.
+cg_binary_size_expr(Sizes, Target, Temp, Fail) ->
+ cg_binary_size_expr_1(Sizes, Target, Temp, Fail,
+ [{move,{integer,0},Target}]).
+
+cg_binary_size_expr_1([{1,E0}|T], Target, Temp, Fail, Acc) ->
+ E1 = cg_gen_binsize(E0, Target, Temp, Fail, Acc),
+ E = [{bs_bits_to_bytes,Fail,Target,Target}|E1],
+ cg_binary_size_expr_1(T, Target, Temp, Fail, E);
+cg_binary_size_expr_1([{8,E0}], Target, Temp, Fail, Acc) ->
+ E = cg_gen_binsize(E0, Target, Temp, Fail, Acc),
+ reverse(E);
+cg_binary_size_expr_1([], _, _, _, Acc) -> reverse(Acc).
+
+cg_gen_binsize([{'*',A,B}|T], Target, Temp, Fail, Acc) ->
+ cg_gen_binsize(T, Target, Temp, Fail,
+ [{bs_add,Fail,[Target,A,B],Target}|Acc]);
+cg_gen_binsize([{size,B}|T], Target, Temp, Fail, Acc) ->
+ cg_gen_binsize([Temp|T], Target, Temp, Fail,
+ [{bif,size,Fail,[B],Temp}|Acc]);
+cg_gen_binsize([E0|T], Target, Temp, Fail, Acc) ->
+ cg_gen_binsize(T, Target, Temp, Fail,
+ [{bs_add,Fail,[Target,E0,1],Target}|Acc]);
+cg_gen_binsize([], _, _, _, Acc) -> Acc.
+
+%% cg_bin_opt(Code0) -> Code
+%% Optimize the size calculations for binary construction.
+
+cg_bin_opt([{move,{integer,0},D},{bs_add,_,[D,{integer,_}=S,1],Dst}|Is]) ->
+ cg_bin_opt([{move,S,Dst}|Is]);
+cg_bin_opt([{move,{integer,0},D},{bs_add,Fail,[D,S,U],Dst}|Is]) ->
+ cg_bin_opt([{bs_add,Fail,[{integer,0},S,U],Dst}|Is]);
+cg_bin_opt([{move,{integer,Bytes},D},{bs_init2,Fail,D,Regs0,Flags,D}|Is]) ->
+ Regs = cg_bo_newregs(Regs0, D),
+ cg_bin_opt([{bs_init2,Fail,Bytes,Regs,Flags,D}|Is]);
+cg_bin_opt([{move,Src,D},{bs_init2,Fail,D,Regs0,Flags,D}|Is]) ->
+ Regs = cg_bo_newregs(Regs0, D),
+ cg_bin_opt([{bs_init2,Fail,Src,Regs,Flags,D}|Is]);
+cg_bin_opt([{move,Src,Dst},{bs_bits_to_bytes,Fail,Dst,Dst}|Is]) ->
+ cg_bin_opt([{bs_bits_to_bytes,Fail,Src,Dst}|Is]);
+cg_bin_opt([{move,Src1,Dst},{bs_add,Fail,[Dst,Src2,U],Dst}|Is]) ->
+ cg_bin_opt([{bs_add,Fail,[Src1,Src2,U],Dst}|Is]);
+cg_bin_opt([{bs_bits_to_bytes,Fail,{integer,N},_}|Is0]) when N rem 8 =/= 0 ->
+ case Fail of
+ {f,0} ->
+ Is = [{move,{atom,badarg},{x,0}},
+ {call_ext_only,1,{extfunc,erlang,error,1}}|Is0],
+ cg_bin_opt(Is);
+ _ ->
+ cg_bin_opt([{jump,Fail}|Is0])
+ end;
+cg_bin_opt([I|Is]) ->
+ [I|cg_bin_opt(Is)];
+cg_bin_opt([]) -> [].
+
+cg_bo_newregs(R, {x,X}) when R-1 =:= X -> R-1;
+cg_bo_newregs(R, _) -> R.
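+
+%% A minimal sketch of the first optimisation rule above (illustrative
+%% values only; this function is not used by the code generator): the
+%% initial {move,{integer,0},...} and a following bs_add with a literal
+%% operand collapse into a single move.
+cg_bin_opt_example() ->
+    [{move,{integer,2},{x,3}}] =
+        cg_bin_opt([{move,{integer,0},{x,3}},
+                    {bs_add,{f,0},[{x,3},{integer,2},1],{x,3}}]),
+    ok.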
+
+%% Common for new and old binary code generation.
+
+cg_bin_put({bin_seg,S0,U,T,Fs,[E0,Next]}, Fail, Bef) ->
+ S1 = case S0 of
+ {var,Sv} -> fetch_var(Sv, Bef);
+ _ -> S0
+ end,
+ E1 = case E0 of
+ {var,V} -> fetch_var(V, Bef);
+ Other -> Other
+ end,
+ Op = case T of
+ integer -> bs_put_integer;
+ binary -> bs_put_binary;
+ float -> bs_put_float
+ end,
+ [{Op,Fail,S1,U,{field_flags,Fs},E1}|cg_bin_put(Next, Fail, Bef)];
+cg_bin_put(bin_end, _, _) -> [].
+
+%% Old style.
+
+cg_binary_old(PutCode) ->
+ [cg_bs_init(PutCode)] ++ need_bin_buf(PutCode).
+
+cg_bs_init(Code) ->
+ {Size,Fs} = foldl(fun ({_,_,{integer,N},U,_,_}, {S,Fs}) ->
+ {S + N*U,Fs};
+ (_, {S,_}) ->
+ {S,[]}
+ end, {0,[exact]}, Code),
+ {bs_init,(Size+7) div 8,{field_flags,Fs}}.
+
+need_bin_buf(Code0) ->
+ {Code1,F,H} = foldr(fun ({_,_,{integer,N},U,_,_}=Bs, {Code,F,H}) ->
+ {[Bs|Code],F,H + N*U};
+ ({_,_,_,_,_,_}=Bs, {Code,F,H}) ->
+ {[Bs|need_bin_buf_need(H, F, Code)],true,0}
+ end, {[],false,0}, Code0),
+ need_bin_buf_need(H, F, Code1).
+
+need_bin_buf_need(0, false, Rest) -> Rest;
+need_bin_buf_need(H, _, Rest) -> [{bs_need_buf,H}|Rest].
+
+cg_build_args(As, Bef) ->
+ map(fun ({var,V}) -> {put,fetch_var(V, Bef)};
+ (Other) -> {put,Other}
+ end, As).
+
+%% return_cg([Val], Le, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+%% break_cg([Val], Le, Vdb, Bef, St) -> {[Ainstr],Aft,St}.
+%% These are very simple, just put return/break values in registers
+%% from 0, then return/break. Use the call setup to clean up stack,
+%% but must clear registers to ensure sr_merge works correctly.
+
+return_cg(Rs, Le, Vdb, Bef, St) ->
+ {Ms,Int} = cg_setup_call(Rs, Bef, Le#l.i, Vdb),
+ {comment({return,Rs}) ++ Ms ++ [return],
+ Int#sr{reg=clear_regs(Int#sr.reg)},St}.
+
+break_cg(Bs, Le, Vdb, Bef, St) ->
+ {Ms,Int} = cg_setup_call(Bs, Bef, Le#l.i, Vdb),
+ {comment({break,Bs}) ++ Ms ++ [{jump,{f,St#cg.break}}],
+ Int#sr{reg=clear_regs(Int#sr.reg)},St}.
+
+%% cg_reg_arg(Arg0, Info) -> Arg
+%% cg_reg_args([Arg0], Info) -> [Arg]
+%% Convert argument[s] into registers. Literal values are returned unchanged.
+
+cg_reg_args(As, Bef) -> [cg_reg_arg(A, Bef) || A <- As].
+
+cg_reg_arg({var,V}, Bef) -> fetch_var(V, Bef);
+cg_reg_arg(Literal, _) -> Literal.
+
+%% cg_setup_call([Arg], Bef, Cur, Vdb) -> {[Instr],Aft}.
+%% Do the complete setup for a call/enter.
+
+cg_setup_call(As, Bef, I, Vdb) ->
+ {Ms,Int0} = cg_call_args(As, Bef, I, Vdb),
+ %% Have set up arguments, can now clean up, compress and save to stack.
+ Int1 = Int0#sr{stk=clear_dead_stk(Int0#sr.stk, I, Vdb),res=[]},
+ {Sis,Int2} = adjust_stack(Int1, I, I+1, Vdb),
+ {Ms ++ Sis ++ [{'%live',length(As)}],Int2}.
+
+%% cg_call_args([Arg], SrState) -> {[Instr],SrState}.
+%% Setup the arguments to a call/enter/bif. Put the arguments into
+%% consecutive registers starting at {x,0} moving any data which
+%% needs to be saved. Return a modified SrState structure with the
+%% new register contents. N.B. the resulting register info will
+%% contain literal (non-variable) values whenever literal arguments are given.
+%%
+%% This routine is complicated by unsaved values in x registers.
+%% We'll move away any unsaved values that are in the registers
+%% to be overwritten by the arguments.
+
+cg_call_args(As, Bef, I, Vdb) ->
+ Regs0 = load_arg_regs(Bef#sr.reg, As),
+ Unsaved = unsaved_registers(Regs0, Bef#sr.stk, I, I+1, Vdb),
+ {UnsavedMoves,Regs} = move_unsaved(Unsaved, Bef#sr.reg, Regs0),
+ Moves0 = gen_moves(As, Bef),
+ Moves = order_moves(Moves0, find_scratch_reg(Regs)),
+ {UnsavedMoves ++ Moves,Bef#sr{reg=Regs}}.
+
+%% load_arg_regs([Reg], Arguments) -> [Reg]
+%% Update the register descriptor to include the arguments (from {x,0}
+%% and upwards). Values in the argument registers are overwritten.
+%% Values in x registers above the arguments are preserved.
+
+load_arg_regs(Regs, As) -> load_arg_regs(Regs, As, 0).
+
+load_arg_regs([_|Rs], [{var,V}|As], I) -> [{I,V}|load_arg_regs(Rs, As, I+1)];
+load_arg_regs([_|Rs], [A|As], I) -> [{I,A}|load_arg_regs(Rs, As, I+1)];
+load_arg_regs([], [{var,V}|As], I) -> [{I,V}|load_arg_regs([], As, I+1)];
+load_arg_regs([], [A|As], I) -> [{I,A}|load_arg_regs([], As, I+1)];
+load_arg_regs(Rs, [], _) -> Rs.
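+
+%% A minimal sketch of load_arg_regs/2 (illustrative values only; not
+%% called elsewhere): the first argument overwrites {x,0}, while
+%% registers above the arguments keep their contents.
+load_arg_regs_example() ->
+    [{0,a1},{1,v1},{2,v2}] =
+        load_arg_regs([{0,old0},{1,v1},{2,v2}], [{var,a1}]),
+    ok.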
+
+%% Returns the variables that must be saved and are currently in the
+%% x registers that are about to be overwritten by the arguments.
+
+unsaved_registers(Regs, Stk, Fb, Lf, Vdb) ->
+ [V || {V,F,L} <- Vdb,
+ F < Fb,
+ L >= Lf,
+ not on_stack(V, Stk),
+ not in_reg(V, Regs)].
+
+in_reg(V, Regs) -> keymember(V, 2, Regs).
+
+%% Move away unsaved variables from the registers that are to be
+%% overwritten by the arguments.
+move_unsaved(Vs, OrigRegs, NewRegs) ->
+ move_unsaved(Vs, OrigRegs, NewRegs, []).
+
+move_unsaved([V|Vs], OrigRegs, NewRegs0, Acc) ->
+ NewRegs = put_reg(V, NewRegs0),
+ Src = fetch_reg(V, OrigRegs),
+ Dst = fetch_reg(V, NewRegs),
+ move_unsaved(Vs, OrigRegs, NewRegs, [{move,Src,Dst}|Acc]);
+move_unsaved([], _, Regs, Acc) -> {Acc,Regs}.
+
+%% gen_moves(As, Sr)
+%% Generate the basic move instructions to move the arguments
+%% to their proper registers. The list will be sorted on
+%% destinations. (I.e. the move to {x,0} will be first --
+%% see the comment to order_moves/2.)
+
+gen_moves(As, Sr) -> gen_moves(As, Sr, 0, []).
+
+gen_moves([{var,V}|As], Sr, I, Acc) ->
+ case fetch_var(V, Sr) of
+ {x,I} -> gen_moves(As, Sr, I+1, Acc);
+ Reg -> gen_moves(As, Sr, I+1, [{move,Reg,{x,I}}|Acc])
+ end;
+gen_moves([A|As], Sr, I, Acc) ->
+ gen_moves(As, Sr, I+1, [{move,A,{x,I}}|Acc]);
+gen_moves([], _, _, Acc) -> lists:keysort(3, Acc).
+
+%% order_moves([Move], ScratchReg) -> [Move]
+%% Orders move instructions so that source registers are not
+%% destroyed before they are used. If there are cycles
+%% (such as {move,{x,0},{x,1}}, {move,{x,1},{x,0}}),
+%% the scratch register is used to break up the cycle.
+%% If possible, the first move of the input list is placed
+%% last in the result list (to make the move to {x,0} occur
+%% just before the call to allow the Beam loader to coalesce
+%% the instructions).
+
+order_moves(Ms, Scr) -> order_moves(Ms, Scr, []).
+
+order_moves([{move,_,_}=M|Ms0], ScrReg, Acc0) ->
+ {Chain,Ms} = collect_chain(Ms0, [M], ScrReg),
+ Acc = reverse(Chain, Acc0),
+ order_moves(Ms, ScrReg, Acc);
+order_moves([], _, Acc) -> Acc.
+
+collect_chain(Ms, Path, ScrReg) ->
+ collect_chain(Ms, Path, [], ScrReg).
+
+collect_chain([{move,Src,Same}=M|Ms0], [{move,Same,_}|_]=Path, Others, ScrReg) ->
+ case keysearch(Src, 3, Path) of
+ {value,_} -> %We have a cycle.
+ {break_up_cycle(M, Path, ScrReg),reverse(Others, Ms0)};
+ false ->
+ collect_chain(reverse(Others, Ms0), [M|Path], [], ScrReg)
+ end;
+collect_chain([M|Ms], Path, Others, ScrReg) ->
+ collect_chain(Ms, Path, [M|Others], ScrReg);
+collect_chain([], Path, Others, _) ->
+ {Path,Others}.
+
+break_up_cycle({move,Src,_}=M, Path, ScrReg) ->
+ [{move,ScrReg,Src},M|break_up_cycle1(Src, Path, ScrReg)].
+
+break_up_cycle1(Dst, [{move,Src,Dst}|Path], ScrReg) ->
+ [{move,Src,ScrReg}|Path];
+break_up_cycle1(Dst, [M|Path], LastMove) ->
+ [M|break_up_cycle1(Dst, Path, LastMove)].
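+
+%% A minimal sketch of order_moves/2 (illustrative values only; not
+%% called elsewhere): swapping {x,0} and {x,1} forms a cycle, which is
+%% broken with the scratch register {x,2}.
+order_moves_example() ->
+    [{move,{x,0},{x,2}},{move,{x,1},{x,0}},{move,{x,2},{x,1}}] =
+        order_moves([{move,{x,0},{x,1}},{move,{x,1},{x,0}}], {x,2}),
+    ok.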
+
+%% clear_dead(Sr, Until, Vdb) -> Aft.
+%% Remove all variables in Sr which have died, i.e. whose last use is at or before Until.
+
+clear_dead(Sr, Until, Vdb) ->
+ Sr#sr{reg=clear_dead_reg(Sr, Until, Vdb),
+ stk=clear_dead_stk(Sr#sr.stk, Until, Vdb)}.
+
+clear_dead_reg(Sr, Until, Vdb) ->
+ Reg = map(fun ({I,V}) ->
+ case vdb_find(V, Vdb) of
+ {V,_,L} when L > Until -> {I,V};
+ _ -> free %Remove anything else
+ end;
+ ({reserved,I,V}) -> {reserved,I,V};
+ (free) -> free
+ end, Sr#sr.reg),
+ reserve(Sr#sr.res, Reg, Sr#sr.stk).
+
+clear_dead_stk(Stk, Until, Vdb) ->
+ map(fun ({V}) ->
+ case vdb_find(V, Vdb) of
+ {V,_,L} when L > Until -> {V};
+ _ -> dead %Remove anything else
+ end;
+ (free) -> free;
+ (dead) -> dead
+ end, Stk).
+
+%% sr_merge(Sr1, Sr2) -> Sr.
+%% Merge two stack/register states, keeping the longer of the two stack
+%% and register descriptors. Perform a consistency check on both: the
+%% common elements must be the same. Allow the frame size 'void' to make
+%% creation of an "empty" frame easy.
+
+sr_merge(#sr{reg=R1,stk=S1,res=[]}, #sr{reg=R2,stk=S2,res=[]}) ->
+ #sr{reg=longest(R1, R2),stk=longest(S1, S2),res=[]};
+sr_merge(void, S2) -> S2#sr{res=[]};
+sr_merge(S1, void) -> S1#sr{res=[]}.
+
+longest([H|T1], [H|T2]) -> [H|longest(T1, T2)];
+longest([dead|T1], [free|T2]) -> [dead|longest(T1, T2)];
+longest([free|T1], [dead|T2]) -> [dead|longest(T1, T2)];
+longest([dead|T1], []) -> [dead|T1];
+longest([], [dead|T2]) -> [dead|T2];
+longest([free|T1], []) -> [free|T1];
+longest([], [free|T2]) -> [free|T2];
+longest([], []) -> [].
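+
+%% A minimal sketch of longest/2 (illustrative values only; not called
+%% elsewhere): the merged descriptor keeps the longer list, and a slot
+%% that is dead on either side stays dead.
+longest_example() ->
+    [{v},dead] = longest([{v},dead], [{v}]),
+    [dead] = longest([dead], [free]),
+    ok.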
+
+%% adjust_stack(Bef, FirstBefore, LastFrom, Vdb) -> {[Ainstr],Aft}.
+%% Do complete stack adjustment by compressing stack and adding
+%% variables to be saved. Try to optimise the stack ordering by placing
+%% variables in reverse order of their lifetimes.
+%%
+%% In Beam, there is a fixed stack frame and no need to do stack compression.
+
+adjust_stack(Bef, Fb, Lf, Vdb) ->
+ Stk0 = Bef#sr.stk,
+ {Stk1,Saves} = save_stack(Stk0, Fb, Lf, Vdb),
+ {saves(Saves, Bef#sr.reg, Stk1),
+ Bef#sr{stk=Stk1}}.
+
+%% save_stack(Stack, FirstBefore, LastFrom, Vdb) -> {[SaveVar],NewStack}.
+%% Save variables which are used past current point and which are not
+%% already on the stack.
+
+save_stack(Stk0, Fb, Lf, Vdb) ->
+ %% New variables that are in use but not on stack.
+ New = [ {V,F,L} || {V,F,L} <- Vdb,
+ F < Fb,
+ L >= Lf,
+ not on_stack(V, Stk0) ],
+ %% Add new variables that are not just dropped immediately.
+ %% N.B. foldr works backwards from the end!!
+ Saves = [ V || {V,_,_} <- keysort(3, New) ],
+ Stk1 = foldr(fun (V, Stk) -> put_stack(V, Stk) end, Stk0, Saves),
+ {Stk1,Saves}.
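+
+%% A minimal sketch of save_stack/4 (illustrative values only; not
+%% called elsewhere): v is still live past the call (last use 9 >= 6)
+%% and not yet on the stack, so it is saved; w dies earlier (3 < 6).
+save_stack_example() ->
+    Vdb = [{v,1,9},{w,2,3}],
+    {[{v}],[v]} = save_stack([], 5, 6, Vdb),
+    ok.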
+
+%% saves([SaveVar], Reg, Stk) -> [{move,Reg,Stk}].
+%% Generate move instructions to save variables onto stack. The
+%% stack/reg info used is that after the new stack has been made.
+
+saves(Ss, Reg, Stk) ->
+ Res = map(fun (V) ->
+ {move,fetch_reg(V, Reg),fetch_stack(V, Stk)}
+ end, Ss),
+ Res.
+
+%% comment(C) -> ['%'{C}].
+
+%comment(C) -> [{'%',C}].
+comment(_) -> [].
+
+%% fetch_var(VarName, StkReg) -> r{R} | sp{Sp}.
+%% find_var(VarName, StkReg) -> ok{r{R} | sp{Sp}} | error.
+%% Fetch/find a variable in either the registers or on the
+%% stack. Fetch KNOWS it's there.
+
+fetch_var(V, Sr) ->
+ case find_reg(V, Sr#sr.reg) of
+ {ok,R} -> R;
+ error -> fetch_stack(V, Sr#sr.stk)
+ end.
+
+% find_var(V, Sr) ->
+% case find_reg(V, Sr#sr.reg) of
+% {ok,R} -> {ok,R};
+% error ->
+% case find_stack(V, Sr#sr.stk) of
+% {ok,S} -> {ok,S};
+% error -> error
+% end
+% end.
+
+load_vars(Vs, Regs) ->
+ foldl(fun ({var,V}, Rs) -> put_reg(V, Rs) end, Regs, Vs).
+
+%% put_reg(Val, Regs) -> Regs.
+%% load_reg(Val, Reg, Regs) -> Regs.
+%% free_reg(Val, Regs) -> Regs.
+%% find_reg(Val, Regs) -> ok{r{R}} | error.
+%% fetch_reg(Val, Regs) -> r{R}.
+%% Functions to interface the registers.
+%% put_reg puts a value into a free register,
+%% load_reg loads a value into a fixed register
+%% free_reg frees a register containing a specific value.
+
+% put_regs(Vs, Rs) -> foldl(fun put_reg/2, Rs, Vs).
+
+put_reg(V, Rs) -> put_reg_1(V, Rs, 0).
+
+put_reg_1(V, [free|Rs], I) -> [{I,V}|Rs];
+put_reg_1(V, [{reserved,I,V}|Rs], I) -> [{I,V}|Rs];
+put_reg_1(V, [R|Rs], I) -> [R|put_reg_1(V, Rs, I+1)];
+put_reg_1(V, [], I) -> [{I,V}].
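+
+%% A minimal sketch of put_reg/2 (illustrative values only; not called
+%% elsewhere): the value goes into the first free slot, or a new slot
+%% is appended at the end.
+put_reg_example() ->
+    [{0,v},{1,w}] = put_reg(v, [free,{1,w}]),
+    [{0,a},{1,v}] = put_reg(v, [{0,a}]),
+    ok.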
+
+load_reg(V, R, Rs) -> load_reg_1(V, R, Rs, 0).
+
+load_reg_1(V, I, [_|Rs], I) -> [{I,V}|Rs];
+load_reg_1(V, I, [R|Rs], C) -> [R|load_reg_1(V, I, Rs, C+1)];
+load_reg_1(V, I, [], I) -> [{I,V}];
+load_reg_1(V, I, [], C) -> [free|load_reg_1(V, I, [], C+1)].
+
+% free_reg(V, [{I,V}|Rs]) -> [free|Rs];
+% free_reg(V, [R|Rs]) -> [R|free_reg(V, Rs)];
+% free_reg(V, []) -> [].
+
+fetch_reg(V, [{I,V}|_]) -> {x,I};
+fetch_reg(V, [_|SRs]) -> fetch_reg(V, SRs).
+
+find_reg(V, [{I,V}|_]) -> {ok,{x,I}};
+find_reg(V, [_|SRs]) -> find_reg(V, SRs);
+find_reg(_, []) -> error.
+
+%% For the bit syntax, we need a scratch register if we are constructing
+%% a binary that will not be used.
+
+find_scratch_reg(Rs) -> find_scratch_reg(Rs, 0).
+
+find_scratch_reg([free|_], I) -> {x,I};
+find_scratch_reg([_|Rs], I) -> find_scratch_reg(Rs, I+1);
+find_scratch_reg([], I) -> {x,I}.
+
+%%copy_reg(Val, R, Regs) -> load_reg(Val, R, Regs).
+%%move_reg(Val, R, Regs) -> load_reg(Val, R, free_reg(Val, Regs)).
+
+%%clear_regs(Regs) -> map(fun (R) -> free end, Regs).
+clear_regs(_) -> [].
+
+max_reg(Regs) ->
+ foldl(fun ({I,_}, _) -> I;
+ (_, Max) -> Max end,
+ -1, Regs) + 1.
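+
+%% A minimal sketch of find_scratch_reg/1 and max_reg/1 (illustrative
+%% values only; not called elsewhere): the scratch register is the
+%% first free slot, and max_reg/1 is one more than the highest index
+%% in use.
+reg_descriptor_example() ->
+    Regs = [{0,a},free,{2,b}],
+    {x,1} = find_scratch_reg(Regs),
+    3 = max_reg(Regs),
+    ok.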
+
+%% put_stack(Val, [{Val}]) -> [{Val}].
+%% fetch_stack(Var, Stk) -> sp{S}.
+%% find_stack(Var, Stk) -> ok{sp{S}} | error.
+%% Functions to interface the stack.
+
+put_stack(Val, []) -> [{Val}];
+put_stack(Val, [dead|Stk]) -> [{Val}|Stk];
+put_stack(Val, [free|Stk]) -> [{Val}|Stk];
+put_stack(Val, [NotFree|Stk]) -> [NotFree|put_stack(Val, Stk)].
+
+put_stack_carefully(Val, Stk0) ->
+ case catch put_stack_carefully1(Val, Stk0) of
+ error -> error;
+ Stk1 when list(Stk1) -> Stk1
+ end.
+
+put_stack_carefully1(_, []) -> throw(error);
+put_stack_carefully1(Val, [dead|Stk]) -> [{Val}|Stk];
+put_stack_carefully1(Val, [free|Stk]) -> [{Val}|Stk];
+put_stack_carefully1(Val, [NotFree|Stk]) ->
+ [NotFree|put_stack_carefully1(Val, Stk)].
+
+fetch_stack(Var, Stk) -> fetch_stack(Var, Stk, 0).
+
+fetch_stack(V, [{V}|_], I) -> {yy,I};
+fetch_stack(V, [_|Stk], I) -> fetch_stack(V, Stk, I+1).
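+
+%% A minimal sketch of put_stack/2 and fetch_stack/2 (illustrative
+%% values only; not called elsewhere): a value reuses the first dead or
+%% free slot and is fetched back as {yy,Index}.
+stack_slot_example() ->
+    Stk = put_stack(v, [{a},dead,{b}]),
+    [{a},{v},{b}] = Stk,
+    {yy,1} = fetch_stack(v, Stk),
+    ok.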
+
+% find_stack(Var, Stk) -> find_stack(Var, Stk, 0).
+
+% find_stack(V, [{V}|Stk], I) -> {ok,{yy,I}};
+% find_stack(V, [O|Stk], I) -> find_stack(V, Stk, I+1);
+% find_stack(V, [], I) -> error.
+
+on_stack(V, Stk) -> keymember(V, 1, Stk).
+
+%% put_catch(CatchTag, Stack) -> Stack'
+%% drop_catch(CatchTag, Stack) -> Stack'
+%% Special interface for putting and removing catch tags, to ensure that
+%% catches nest properly. Also used for try tags.
+
+put_catch(Tag, Stk0) -> put_catch(Tag, reverse(Stk0), []).
+
+put_catch(Tag, [], Stk) ->
+ put_stack({catch_tag,Tag}, Stk);
+put_catch(Tag, [{{catch_tag,_}}|_]=RevStk, Stk) ->
+ reverse(RevStk, put_stack({catch_tag,Tag}, Stk));
+put_catch(Tag, [Other|Stk], Acc) ->
+ put_catch(Tag, Stk, [Other|Acc]).
+
+drop_catch(Tag, [{{catch_tag,Tag}}|Stk]) -> [free|Stk];
+drop_catch(Tag, [Other|Stk]) -> [Other|drop_catch(Tag, Stk)].
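+
+%% A minimal sketch of put_catch/2 and drop_catch/2 (illustrative
+%% values only; not called elsewhere): a new catch tag is placed above
+%% any existing one, so catches nest properly, and dropping it frees
+%% the slot again.
+catch_tag_example() ->
+    Stk0 = [{{catch_tag,t0}},free],
+    Stk1 = put_catch(t1, Stk0),
+    [{{catch_tag,t0}},{{catch_tag,t1}}] = Stk1,
+    [{{catch_tag,t0}},free] = drop_catch(t1, Stk1),
+    ok.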
+
+%%%
+%%% Finish the code generation for the bit syntax matching.
+%%%
+
+bs_function({function,Name,Arity,CLabel,Asm0}=Func) ->
+ case bs_needed(Asm0, 0, false, []) of
+ {false,[]} -> Func;
+ {true,Dict} ->
+ Asm = bs_replace(Asm0, Dict, []),
+ {function,Name,Arity,CLabel,Asm}
+ end.
+
+%%%
+%%% Pass 1: Find out which bs_restore's are needed. For now we assume
+%%% that a bs_restore is needed unless it is directly preceded by a bs_save.
+%%%
+
+bs_needed([{bs_save,Name},{bs_restore,Name}|T], N, _BsUsed, Dict) ->
+ bs_needed(T, N, true, Dict);
+bs_needed([{bs_save,_Name}|T], N, _BsUsed, Dict) ->
+ bs_needed(T, N, true, Dict);
+bs_needed([{bs_restore,Name}|T], N, _BsUsed, Dict) ->
+ case keysearch(Name, 1, Dict) of
+ {value,{Name,_}} -> bs_needed(T, N, true, Dict);
+ false -> bs_needed(T, N+1, true, [{Name,N}|Dict])
+ end;
+bs_needed([{bs_init,_,_}|T], N, _, Dict) ->
+ bs_needed(T, N, true, Dict);
+bs_needed([{bs_init2,_,_,_,_,_}|T], N, _, Dict) ->
+ bs_needed(T, N, true, Dict);
+bs_needed([{bs_start_match,_,_}|T], N, _, Dict) ->
+ bs_needed(T, N, true, Dict);
+bs_needed([_|T], N, BsUsed, Dict) ->
+ bs_needed(T, N, BsUsed, Dict);
+bs_needed([], _, BsUsed, Dict) -> {BsUsed,Dict}.
+
+%%%
+%%% Pass 2: Only needed if there were some bs_* instructions found.
+%%%
+%%% Remove any bs_save with a name that was never found to be restored
+%%% in the first pass.
+%%%
+
+bs_replace([{bs_save,Name}=Save,{bs_restore,Name}|T], Dict, Acc) ->
+ bs_replace([Save|T], Dict, Acc);
+bs_replace([{bs_save,Name}|T], Dict, Acc) ->
+ case keysearch(Name, 1, Dict) of
+ {value,{Name,N}} ->
+ bs_replace(T, Dict, [{bs_save,N}|Acc]);
+ false ->
+ bs_replace(T, Dict, Acc)
+ end;
+bs_replace([{bs_restore,Name}|T], Dict, Acc) ->
+ case keysearch(Name, 1, Dict) of
+ {value,{Name,N}} ->
+ bs_replace(T, Dict, [{bs_restore,N}|Acc]);
+ false ->
+ bs_replace(T, Dict, Acc)
+ end;
+bs_replace([{bs_init2,Fail,Bytes,Regs,Flags,Dst}|T0], Dict, Acc) ->
+ case bs_find_test_heap(T0) of
+ none ->
+ bs_replace(T0, Dict, [{bs_init2,Fail,Bytes,0,Regs,Flags,Dst}|Acc]);
+ {T,Words} ->
+ bs_replace(T, Dict, [{bs_init2,Fail,Bytes,Words,Regs,Flags,Dst}|Acc])
+ end;
+bs_replace([H|T], Dict, Acc) ->
+ bs_replace(T, Dict, [H|Acc]);
+bs_replace([], _, Acc) -> reverse(Acc).
+
+bs_find_test_heap(Is) ->
+ bs_find_test_heap_1(Is, []).
+
+bs_find_test_heap_1([{bs_put_integer,_,_,_,_,_}=I|Is], Acc) ->
+ bs_find_test_heap_1(Is, [I|Acc]);
+bs_find_test_heap_1([{bs_put_float,_,_,_,_,_}=I|Is], Acc) ->
+ bs_find_test_heap_1(Is, [I|Acc]);
+bs_find_test_heap_1([{bs_put_binary,_,_,_,_,_}=I|Is], Acc) ->
+ bs_find_test_heap_1(Is, [I|Acc]);
+bs_find_test_heap_1([{test_heap,Words,_}|Is], Acc) ->
+ {reverse(Acc, Is),Words};
+bs_find_test_heap_1(_, _) -> none.
+
+%% new_label(St) -> {L,St}.
+
+new_label(St) ->
+ L = St#cg.lcount,
+ {L,St#cg{lcount=L+1}}.
+
+flatmapfoldl(F, Accu0, [Hd|Tail]) ->
+ {R,Accu1} = F(Hd, Accu0),
+ {Rs,Accu2} = flatmapfoldl(F, Accu1, Tail),
+ {R++Rs,Accu2};
+flatmapfoldl(_, Accu, []) -> {[],Accu}.
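+
+%% A minimal sketch of flatmapfoldl/3 (illustrative values only; not
+%% called elsewhere): like lists:mapfoldl/3, but the fun returns a list
+%% that is appended into the result.
+flatmapfoldl_example() ->
+    Dup = fun (X, N) -> {[X,X],N+1} end,
+    {[a,a,b,b],2} = flatmapfoldl(Dup, 0, [a,b]),
+    ok.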
+
+flatmapfoldr(F, Accu0, [Hd|Tail]) ->
+ {Rs,Accu1} = flatmapfoldr(F, Accu0, Tail),
+ {R,Accu2} = F(Hd, Accu1),
+ {R++Rs,Accu2};
+flatmapfoldr(_, Accu, []) -> {[],Accu}.
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_core.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_core.erl
new file mode 100644
index 0000000000..b561182932
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_core.erl
@@ -0,0 +1,1320 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_core.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
+%%
+%% Purpose : Transform normal Erlang to Core Erlang
+
+%% At this stage all preprocessing has been done. All that is left are
+%% "pure" Erlang functions.
+%%
+%% Core transformation is done in three stages:
+%%
+%% 1. Flatten expressions into an internal core form without doing
+%% matching.
+%%
+%% 2. Step "forwards" over the icore code annotating each "top-level"
+%% thing with variable usage. Detect bound variables in matching
+%% and replace with explicit guard test. Annotate "internal-core"
+%% expressions with variables they use and create. Convert matches
+%% to cases when not pure assignments.
+%%
+%% 3. Step "backwards" over icore code using variable usage
+%% annotations to change implicit exported variables to explicit
+%% returns.
+%%
+%% To ensure the evaluation order we ensure that all arguments are
+%% safe. A "safe" is basically a core_lib simple with VERY restricted
+%% binaries.
+%%
+%% We have to be very careful with matches as these create variables.
+%% While we try not to flatten things more than necessary we must make
+%% sure that all matches are at the top level. For this we use the
+%% type "novars" which are non-match expressions. Cases and receives
+%% can also create problems due to exported variables, so they are not
+%% "novars" either. I.e. a novars will not export variables.
+%%
+%% Annotations in the #iset, #iletrec, and all other internal records
+%% are kept in a record, #a, not in a list as in proper core. This is
+%% easier and faster and creates no problems as we have complete control
+%% over all annotations.
+%%
+%% On output, the annotation for most Core Erlang terms will contain
+%% the source line number. A few terms will be marked with the atom
+%% 'compiler_generated', to indicate that the compiler has generated
+%% them and that no warning should be generated if they are optimized
+%% away.
+%%
+%%
+%% In this translation:
+%%
+%% call ops are safes
+%% call arguments are safes
+%% match arguments are novars
+%% case arguments are novars
+%% receive timeouts are novars
+%% let/set arguments are expressions
+%% fun is not a safe
+
+-module(v3_core).
+
+-export([module/2,format_error/1]).
+
+-import(lists, [map/2,foldl/3,foldr/3,mapfoldl/3,splitwith/2]).
+-import(ordsets, [add_element/2,del_element/2,is_element/2,
+ union/1,union/2,intersection/2,subtract/2]).
+
+-include("core_parse.hrl").
+
+-record(a, {us=[],ns=[],anno=[]}). %Internal annotation
+
+%% Internal core expressions and help functions.
+%% N.B. annotation fields are in the same place as in normal Core expressions.
+
+-record(iset, {anno=#a{},var,arg}).
+-record(iletrec, {anno=#a{},defs,body}).
+-record(imatch, {anno=#a{},pat,guard=[],arg,fc}).
+-record(icase, {anno=#a{},args,clauses,fc}).
+-record(iclause, {anno=#a{},pats,pguard=[],guard,body}).
+-record(ifun, {anno=#a{},id,vars,clauses,fc}).
+-record(iapply, {anno=#a{},op,args}).
+-record(icall, {anno=#a{},module,name,args}).
+-record(iprimop, {anno=#a{},name,args}).
+-record(itry, {anno=#a{},args,vars,body,evars,handler}).
+-record(icatch, {anno=#a{},body}).
+-record(ireceive1, {anno=#a{},clauses}).
+-record(ireceive2, {anno=#a{},clauses,timeout,action}).
+-record(iprotect, {anno=#a{},body}).
+-record(ibinary, {anno=#a{},segments}). %Not used in patterns.
+
+-record(core, {vcount=0, %Variable counter
+ fcount=0, %Function counter
+ ws=[]}). %Warnings.
+
+module({Mod,Exp,Forms}, _Opts) ->
+ Cexp = map(fun ({N,A}) -> #c_fname{id=N,arity=A} end, Exp),
+ {Kfs,As,Ws} = foldr(fun form/2, {[],[],[]}, Forms),
+ {ok,#c_module{name=#c_atom{val=Mod},exports=Cexp,attrs=As,defs=Kfs},Ws}.
+
+form({function,_,_,_,_}=F0, {Fs,As,Ws0}) ->
+ {F,Ws} = function(F0, Ws0),
+ {[F|Fs],As,Ws};
+form({attribute,_,_,_}=F, {Fs,As,Ws}) ->
+ {Fs,[attribute(F)|As],Ws}.
+
+attribute({attribute,_,Name,Val}) ->
+ #c_def{name=core_lib:make_literal(Name),
+ val=core_lib:make_literal(Val)}.
+
+function({function,_,Name,Arity,Cs0}, Ws0) ->
+ %%ok = io:fwrite("~p - ", [{Name,Arity}]),
+ St0 = #core{vcount=0,ws=Ws0},
+ {B0,St1} = body(Cs0, Arity, St0),
+ %%ok = io:fwrite("1", []),
+ %%ok = io:fwrite("~w:~p~n", [?LINE,B0]),
+ {B1,St2} = ubody(B0, St1),
+ %%ok = io:fwrite("2", []),
+ %%ok = io:fwrite("~w:~p~n", [?LINE,B1]),
+ {B2,#core{ws=Ws}} = cbody(B1, St2),
+ %%ok = io:fwrite("3~n", []),
+ {#c_def{name=#c_fname{id=Name,arity=Arity},val=B2},Ws}.
+
+body(Cs0, Arity, St0) ->
+ Anno = [element(2, hd(Cs0))],
+ {Args,St1} = new_vars(Anno, Arity, St0),
+ {Cs1,St2} = clauses(Cs0, St1),
+ {Ps,St3} = new_vars(Arity, St2), %Need new variables here
+ Fc = fail_clause(Ps, #c_tuple{es=[#c_atom{val=function_clause}|Ps]}),
+ {#ifun{anno=#a{anno=Anno},id=[],vars=Args,clauses=Cs1,fc=Fc},St3}.
+
+%% clause(Clause, State) -> {Cclause,State} | noclause.
+%% clauses([Clause], State) -> {[Cclause],State}.
+%% Convert clauses. Trap bad pattern aliases and remove clause from
+%% clause list.
+
+clauses([C0|Cs0], St0) ->
+ case clause(C0, St0) of
+ {noclause,St} -> clauses(Cs0, St);
+ {C,St1} ->
+ {Cs,St2} = clauses(Cs0, St1),
+ {[C|Cs],St2}
+ end;
+clauses([], St) -> {[],St}.
+
+clause({clause,Lc,H0,G0,B0}, St0) ->
+ case catch head(H0) of
+ {'EXIT',_}=Exit -> exit(Exit); %Propagate error
+ nomatch ->
+ St = add_warning(Lc, nomatch, St0),
+ {noclause,St}; %Bad pattern
+ H1 ->
+ {G1,St1} = guard(G0, St0),
+ {B1,St2} = exprs(B0, St1),
+ {#iclause{anno=#a{anno=[Lc]},pats=H1,guard=G1,body=B1},St2}
+ end.
+
+%% head([P]) -> [P].
+
+head(Ps) -> pattern_list(Ps).
+
+%% guard([Expr], State) -> {[Cexpr],State}.
+%% Build an explicit and/or tree of guard alternatives, then traverse
+%% top-level and/or tree and "protect" inner tests.
+
+guard([], St) -> {[],St};
+guard(Gs0, St) ->
+ Gs = foldr(fun (Gt0, Rhs) ->
+ Gt1 = guard_tests(Gt0),
+ L = element(2, Gt1),
+ {op,L,'or',Gt1,Rhs}
+ end, guard_tests(last(Gs0)), first(Gs0)),
+ gexpr_top(Gs, St).
+
+guard_tests([]) -> [];
+guard_tests(Gs) ->
+ L = element(2, hd(Gs)),
+ {protect,L,foldr(fun (G, Rhs) -> {op,L,'and',G,Rhs} end, last(Gs), first(Gs))}.
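+
+%% For illustration (abstract-format terms sketched in shorthand):
+%% a guard sequence such as
+%%     when X > 1, X < 5; X == 0
+%% arrives here as [[X>1,X<5],[X==0]] and is folded into
+%%     {op,L,'or',{protect,L,{op,L,'and',X>1,X<5}},{protect,L,X==0}}
+%% before gexpr_top/2 converts it to internal core form.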
+
+%% gexpr_top(Expr, State) -> {Cexpr,State}.
+%% Generate an internal core expression of a guard test. Explicitly
+%% handle outer boolean expressions and "protect" inner tests in a
+%% reasonably smart way.
+
+gexpr_top(E0, St0) ->
+ {E1,Eps0,Bools,St1} = gexpr(E0, [], St0),
+ {E,Eps,St} = force_booleans(Bools, E1, Eps0, St1),
+ {Eps++[E],St}.
+
+%% gexpr(Expr, Bools, State) -> {Cexpr,[PreExp],Bools,State}.
+%% Generate an internal core expression of a guard test.
+
+gexpr({protect,Line,Arg}, Bools0, St0) ->
+ case gexpr(Arg, [], St0) of
+ {E0,[],Bools,St1} ->
+ {E,Eps,St} = force_booleans(Bools, E0, [], St1),
+ {E,Eps,Bools0,St};
+ {E0,Eps0,Bools,St1} ->
+ {E,Eps,St} = force_booleans(Bools, E0, Eps0, St1),
+ {#iprotect{anno=#a{anno=[Line]},body=Eps++[E]},[],Bools0,St}
+ end;
+gexpr({op,Line,Op,L,R}=Call, Bools0, St0) ->
+ case erl_internal:bool_op(Op, 2) of
+ true ->
+ {Le,Lps,Bools1,St1} = gexpr(L, Bools0, St0),
+ {Ll,Llps,St2} = force_safe(Le, St1),
+ {Re,Rps,Bools,St3} = gexpr(R, Bools1, St2),
+ {Rl,Rlps,St4} = force_safe(Re, St3),
+ Anno = [Line],
+ {#icall{anno=#a{anno=Anno}, %Must have an #a{}
+ module=#c_atom{anno=Anno,val=erlang},name=#c_atom{anno=Anno,val=Op},
+ args=[Ll,Rl]},Lps ++ Llps ++ Rps ++ Rlps,Bools,St4};
+ false ->
+ gexpr_test(Call, Bools0, St0)
+ end;
+gexpr({op,Line,Op,A}=Call, Bools0, St0) ->
+ case erl_internal:bool_op(Op, 1) of
+ true ->
+ {Ae,Aps,Bools,St1} = gexpr(A, Bools0, St0),
+ {Al,Alps,St2} = force_safe(Ae, St1),
+ Anno = [Line],
+ {#icall{anno=#a{anno=Anno}, %Must have an #a{}
+ module=#c_atom{anno=Anno,val=erlang},name=#c_atom{anno=Anno,val=Op},
+ args=[Al]},Aps ++ Alps,Bools,St2};
+ false ->
+ gexpr_test(Call, Bools0, St0)
+ end;
+gexpr(E0, Bools, St0) ->
+ gexpr_test(E0, Bools, St0).
+
+%% gexpr_test(Expr, Bools, State) -> {Cexpr,[PreExp],Bools,State}.
+%% Generate a guard test. At this stage we must be sure that we have
+%% a proper boolean value here, so we wrap things with a true test if we
+%% don't know, i.e. if it is not a comparison or a type test.
+
+gexpr_test({atom,L,true}, Bools, St0) ->
+ {#c_atom{anno=[L],val=true},[],Bools,St0};
+gexpr_test({atom,L,false}, Bools, St0) ->
+ {#c_atom{anno=[L],val=false},[],Bools,St0};
+gexpr_test(E0, Bools0, St0) ->
+ {E1,Eps0,St1} = expr(E0, St0),
+ %% Generate "top-level" test and argument calls.
+ case E1 of
+ #icall{anno=Anno,module=#c_atom{val=erlang},name=#c_atom{val=N},args=As} ->
+ Ar = length(As),
+ case erl_internal:type_test(N, Ar) orelse
+ erl_internal:comp_op(N, Ar) orelse
+ (N == internal_is_record andalso Ar == 3) of
+ true -> {E1,Eps0,Bools0,St1};
+ false ->
+ Lanno = Anno#a.anno,
+ {New,St2} = new_var(Lanno, St1),
+ Bools = [New|Bools0],
+ {#icall{anno=Anno, %Must have an #a{}
+ module=#c_atom{anno=Lanno,val=erlang},
+ name=#c_atom{anno=Lanno,val='=:='},
+ args=[New,#c_atom{anno=Lanno,val=true}]},
+ Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
+ end;
+ _ ->
+ Anno = get_ianno(E1),
+ Lanno = get_lineno_anno(E1),
+ case core_lib:is_simple(E1) of
+ true ->
+ Bools = [E1|Bools0],
+ {#icall{anno=Anno, %Must have an #a{}
+ module=#c_atom{anno=Lanno,val=erlang},
+ name=#c_atom{anno=Lanno,val='=:='},
+ args=[E1,#c_atom{anno=Lanno,val=true}]},Eps0,Bools,St1};
+ false ->
+ {New,St2} = new_var(Lanno, St1),
+ Bools = [New|Bools0],
+ {#icall{anno=Anno, %Must have an #a{}
+ module=#c_atom{anno=Lanno,val=erlang},
+ name=#c_atom{anno=Lanno,val='=:='},
+ args=[New,#c_atom{anno=Lanno,val=true}]},
+ Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
+ end
+ end.
+
+force_booleans([], E, Eps, St) ->
+ {E,Eps,St};
+force_booleans([V|Vs], E0, Eps0, St0) ->
+ {E1,Eps1,St1} = force_safe(E0, St0),
+ Lanno = element(2, V),
+ Anno = #a{anno=Lanno},
+ Call = #icall{anno=Anno,module=#c_atom{anno=Lanno,val=erlang},
+ name=#c_atom{anno=Lanno,val=is_boolean},
+ args=[V]},
+ {New,St} = new_var(Lanno, St1),
+ Iset = #iset{anno=Anno,var=New,arg=Call},
+ Eps = Eps0 ++ Eps1 ++ [Iset],
+ E = #icall{anno=Anno,
+ module=#c_atom{anno=Lanno,val=erlang},name=#c_atom{anno=Lanno,val='and'},
+ args=[E1,New]},
+ force_booleans(Vs, E, Eps, St).
+
+%% exprs([Expr], State) -> {[Cexpr],State}.
+%% Flatten top-level exprs.
+
+exprs([E0|Es0], St0) ->
+ {E1,Eps,St1} = expr(E0, St0),
+ {Es1,St2} = exprs(Es0, St1),
+ {Eps ++ [E1] ++ Es1,St2};
+exprs([], St) -> {[],St}.
+
+%% expr(Expr, State) -> {Cexpr,[PreExp],State}.
+%% Generate an internal core expression.
+
+expr({var,L,V}, St) -> {#c_var{anno=[L],name=V},[],St};
+expr({char,L,C}, St) -> {#c_char{anno=[L],val=C},[],St};
+expr({integer,L,I}, St) -> {#c_int{anno=[L],val=I},[],St};
+expr({float,L,F}, St) -> {#c_float{anno=[L],val=F},[],St};
+expr({atom,L,A}, St) -> {#c_atom{anno=[L],val=A},[],St};
+expr({nil,L}, St) -> {#c_nil{anno=[L]},[],St};
+expr({string,L,S}, St) -> {#c_string{anno=[L],val=S},[],St};
+expr({cons,L,H0,T0}, St0) ->
+ {H1,Hps,St1} = safe(H0, St0),
+ {T1,Tps,St2} = safe(T0, St1),
+ {#c_cons{anno=[L],hd=H1,tl=T1},Hps ++ Tps,St2};
+expr({lc,L,E,Qs}, St) ->
+ lc_tq(L, E, Qs, {nil,L}, St);
+expr({tuple,L,Es0}, St0) ->
+ {Es1,Eps,St1} = safe_list(Es0, St0),
+ {#c_tuple{anno=[L],es=Es1},Eps,St1};
+expr({bin,L,Es0}, St0) ->
+ {Es1,Eps,St1} = expr_bin(Es0, St0),
+ {#ibinary{anno=#a{anno=[L]},segments=Es1},Eps,St1};
+expr({block,_,Es0}, St0) ->
+ %% Inline the block directly.
+ {Es1,St1} = exprs(first(Es0), St0),
+ {E1,Eps,St2} = expr(last(Es0), St1),
+ {E1,Es1 ++ Eps,St2};
+expr({'if',L,Cs0}, St0) ->
+ {Cs1,St1} = clauses(Cs0, St0),
+ Fc = fail_clause([], #c_atom{val=if_clause}),
+ {#icase{anno=#a{anno=[L]},args=[],clauses=Cs1,fc=Fc},[],St1};
+expr({'case',L,E0,Cs0}, St0) ->
+ {E1,Eps,St1} = novars(E0, St0),
+ {Cs1,St2} = clauses(Cs0, St1),
+ {Fpat,St3} = new_var(St2),
+ Fc = fail_clause([Fpat], #c_tuple{es=[#c_atom{val=case_clause},Fpat]}),
+ {#icase{anno=#a{anno=[L]},args=[E1],clauses=Cs1,fc=Fc},Eps,St3};
+expr({'receive',L,Cs0}, St0) ->
+ {Cs1,St1} = clauses(Cs0, St0),
+ {#ireceive1{anno=#a{anno=[L]},clauses=Cs1}, [], St1};
+expr({'receive',L,Cs0,Te0,Tes0}, St0) ->
+ {Te1,Teps,St1} = novars(Te0, St0),
+ {Tes1,St2} = exprs(Tes0, St1),
+ {Cs1,St3} = clauses(Cs0, St2),
+ {#ireceive2{anno=#a{anno=[L]},
+ clauses=Cs1,timeout=Te1,action=Tes1},Teps,St3};
+expr({'try',L,Es0,[],Ecs,[]}, St0) ->
+ %% 'try ... catch ... end'
+ {Es1,St1} = exprs(Es0, St0),
+ {V,St2} = new_var(St1), %This name should be arbitrary
+ {Evs,Hs,St3} = try_exception(Ecs, St2),
+ {#itry{anno=#a{anno=[L]},args=Es1,vars=[V],body=[V],
+ evars=Evs,handler=Hs},
+ [],St3};
+expr({'try',L,Es0,Cs0,Ecs,[]}, St0) ->
+ %% 'try ... of ... catch ... end'
+ {Es1,St1} = exprs(Es0, St0),
+ {V,St2} = new_var(St1), %This name should be arbitrary
+ {Cs1,St3} = clauses(Cs0, St2),
+ {Fpat,St4} = new_var(St3),
+ Fc = fail_clause([Fpat], #c_tuple{es=[#c_atom{val=try_clause},Fpat]}),
+ {Evs,Hs,St5} = try_exception(Ecs, St4),
+ {#itry{anno=#a{anno=[L]},args=Es1,
+ vars=[V],body=[#icase{anno=#a{},args=[V],clauses=Cs1,fc=Fc}],
+ evars=Evs,handler=Hs},
+ [],St5};
+expr({'try',L,Es0,[],[],As0}, St0) ->
+ %% 'try ... after ... end'
+ {Es1,St1} = exprs(Es0, St0),
+ {As1,St2} = exprs(As0, St1),
+ {Evs,Hs,St3} = try_after(As1,St2),
+ {V,St4} = new_var(St3), % (must not exist in As1)
+ %% TODO: this duplicates the 'after'-code; should lift to function.
+ {#itry{anno=#a{anno=[L]},args=Es1,vars=[V],body=As1++[V],
+ evars=Evs,handler=Hs},
+ [],St4};
+expr({'try',L,Es,Cs,Ecs,As}, St0) ->
+ %% 'try ... [of ...] [catch ...] after ... end'
+ expr({'try',L,[{'try',L,Es,Cs,Ecs,[]}],[],[],As}, St0);
+expr({'catch',L,E0}, St0) ->
+ {E1,Eps,St1} = expr(E0, St0),
+ {#icatch{anno=#a{anno=[L]},body=Eps ++ [E1]},[],St1};
+expr({'fun',L,{function,F,A},{_,_,_}=Id}, St) ->
+ {#c_fname{anno=[L,{id,Id}],id=F,arity=A},[],St};
+expr({'fun',L,{clauses,Cs},Id}, St) ->
+ fun_tq(Id, Cs, L, St);
+expr({call,L0,{remote,_,{atom,_,erlang},{atom,_,is_record}},[_,_,_]=As}, St)
+ when L0 < 0 ->
+ %% Compiler-generated erlang:is_record/3 should be converted to
+ %% erlang:internal_is_record/3.
+ L = -L0,
+ expr({call,L,{remote,L,{atom,L,erlang},{atom,L,internal_is_record}},As}, St);
+expr({call,L,{remote,_,M,F},As0}, St0) ->
+ {[M1,F1|As1],Aps,St1} = safe_list([M,F|As0], St0),
+ {#icall{anno=#a{anno=[L]},module=M1,name=F1,args=As1},Aps,St1};
+expr({call,Lc,{atom,Lf,F},As0}, St0) ->
+ {As1,Aps,St1} = safe_list(As0, St0),
+ Op = #c_fname{anno=[Lf],id=F,arity=length(As1)},
+ {#iapply{anno=#a{anno=[Lc]},op=Op,args=As1},Aps,St1};
+expr({call,L,FunExp,As0}, St0) ->
+ {Fun,Fps,St1} = safe(FunExp, St0),
+ {As1,Aps,St2} = safe_list(As0, St1),
+ {#iapply{anno=#a{anno=[L]},op=Fun,args=As1},Fps ++ Aps,St2};
+expr({match,L,P0,E0}, St0) ->
+ %% First fold matches together to create aliases.
+ {P1,E1} = fold_match(E0, P0),
+ {E2,Eps,St1} = novars(E1, St0),
+ P2 = (catch pattern(P1)),
+ {Fpat,St2} = new_var(St1),
+ Fc = fail_clause([Fpat], #c_tuple{es=[#c_atom{val=badmatch},Fpat]}),
+ case P2 of
+ {'EXIT',_}=Exit -> exit(Exit); %Propagate error
+ nomatch ->
+ St = add_warning(L, nomatch, St2),
+ {#icase{anno=#a{anno=[L]},
+ args=[E2],clauses=[],fc=Fc},Eps,St};
+ _Other ->
+ {#imatch{anno=#a{anno=[L]},pat=P2,arg=E2,fc=Fc},Eps,St2}
+ end;
+expr({op,_,'++',{lc,Llc,E,Qs},L2}, St) ->
+ %% Optimise this here because of the list comprehension algorithm.
+ lc_tq(Llc, E, Qs, L2, St);
+expr({op,L,Op,A0}, St0) ->
+ {A1,Aps,St1} = safe(A0, St0),
+ LineAnno = [L],
+ {#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
+ module=#c_atom{anno=LineAnno,val=erlang},
+ name=#c_atom{anno=LineAnno,val=Op},args=[A1]},Aps,St1};
+expr({op,L,Op,L0,R0}, St0) ->
+ {As,Aps,St1} = safe_list([L0,R0], St0),
+ LineAnno = [L],
+ {#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
+ module=#c_atom{anno=LineAnno,val=erlang},
+ name=#c_atom{anno=LineAnno,val=Op},args=As},Aps,St1}.
+
+%% try_exception([ExcpClause], St) -> {[ExcpVar],Handler,St}.
+
+try_exception(Ecs0, St0) ->
+ %% Note that Tag is not needed for rethrow - it is already in Info.
+ {Evs,St1} = new_vars(3, St0), % Tag, Value, Info
+ {Ecs1,St2} = clauses(Ecs0, St1),
+ [_,Value,Info] = Evs,
+ Ec = #iclause{anno=#a{anno=[compiler_generated]},
+ pats=[#c_tuple{es=Evs}],guard=[#c_atom{val=true}],
+ body=[#iprimop{anno=#a{}, %Must have an #a{}
+ name=#c_atom{val=raise},
+ args=[Info,Value]}]},
+ Hs = [#icase{anno=#a{},args=[#c_tuple{es=Evs}],clauses=Ecs1,fc=Ec}],
+ {Evs,Hs,St2}.
+
+try_after(As, St0) ->
+ %% See above.
+ {Evs,St1} = new_vars(3, St0), % Tag, Value, Info
+ [_,Value,Info] = Evs,
+ B = As ++ [#iprimop{anno=#a{}, %Must have an #a{}
+ name=#c_atom{val=raise},
+ args=[Info,Value]}],
+ Ec = #iclause{anno=#a{anno=[compiler_generated]},
+ pats=[#c_tuple{es=Evs}],guard=[#c_atom{val=true}],
+ body=B},
+ Hs = [#icase{anno=#a{},args=[#c_tuple{es=Evs}],clauses=[],fc=Ec}],
+ {Evs,Hs,St1}.
+
+%% expr_bin([ArgExpr], St) -> {[Arg],[PreExpr],St}.
+%% Flatten the arguments of a bin. Do this straight left to right!
+
+expr_bin(Es, St) ->
+ foldr(fun (E, {Ces,Esp,St0}) ->
+ {Ce,Ep,St1} = bitstr(E, St0),
+ {[Ce|Ces],Ep ++ Esp,St1}
+ end, {[],[],St}, Es).
+
+bitstr({bin_element,_,E0,Size0,[Type,{unit,Unit}|Flags]}, St0) ->
+ {E1,Eps,St1} = safe(E0, St0),
+ {Size1,Eps2,St2} = safe(Size0, St1),
+ {#c_bitstr{val=E1,size=Size1,
+ unit=core_lib:make_literal(Unit),
+ type=core_lib:make_literal(Type),
+ flags=core_lib:make_literal(Flags)},
+ Eps ++ Eps2,St2}.
+
+%% fun_tq(Id, [Clauses], Line, State) -> {Fun,[PreExp],State}.
+
+fun_tq(Id, Cs0, L, St0) ->
+ {Cs1,St1} = clauses(Cs0, St0),
+ Arity = length((hd(Cs1))#iclause.pats),
+ {Args,St2} = new_vars(Arity, St1),
+ {Ps,St3} = new_vars(Arity, St2), %Need new variables here
+ Fc = fail_clause(Ps, #c_tuple{es=[#c_atom{val=function_clause}|Ps]}),
+ Fun = #ifun{anno=#a{anno=[L]},
+ id=[{id,Id}], %We KNOW!
+ vars=Args,clauses=Cs1,fc=Fc},
+ {Fun,[],St3}.
+
+%% lc_tq(Line, Exp, [Qualifier], More, State) -> {LetRec,[PreExp],State}.
+%% This TQ from Simon PJ pp 127-138.
+%% This gets a bit messy as we must transform all directly here. We
+%% recognise guard tests and try to fold them together and join them to a
+%% preceding generator; this should give us better and more compact
+%% code.
+%% More could be transformed before calling lc_tq.
+
+lc_tq(Line, E, [{generate,Lg,P,G}|Qs0], More, St0) ->
+ {Gs,Qs1} = splitwith(fun is_guard_test/1, Qs0),
+ {Name,St1} = new_fun_name("lc", St0),
+ {Head,St2} = new_var(St1),
+ {Tname,St3} = new_var_name(St2),
+ LA = [Line],
+ LAnno = #a{anno=LA},
+ Tail = #c_var{anno=LA,name=Tname},
+ {Arg,St4} = new_var(St3),
+ NewMore = {call,Lg,{atom,Lg,Name},[{var,Lg,Tname}]},
+ {Guardc,St5} = lc_guard_tests(Gs, St4), %These are always flat!
+ {Lc,Lps,St6} = lc_tq(Line, E, Qs1, NewMore, St5),
+ {Mc,Mps,St7} = expr(More, St6),
+ {Nc,Nps,St8} = expr(NewMore, St7),
+ case catch pattern(P) of
+ {'EXIT',_}=Exit ->
+ St9 = St8,
+ Pc = nomatch,
+ exit(Exit); %Propagate error
+ nomatch ->
+ St9 = add_warning(Line, nomatch, St8),
+ Pc = nomatch;
+ Pc ->
+ St9 = St8
+ end,
+ {Gc,Gps,St10} = safe(G, St9), %Will be a function argument!
+ Fc = fail_clause([Arg], #c_tuple{anno=LA,
+ es=[#c_atom{val=function_clause},Arg]}),
+ Cs0 = [#iclause{anno=#a{anno=[compiler_generated|LA]},
+ pats=[#c_cons{anno=LA,hd=Head,tl=Tail}],
+ guard=[],
+ body=Nps ++ [Nc]},
+ #iclause{anno=LAnno,
+ pats=[#c_nil{anno=LA}],guard=[],
+ body=Mps ++ [Mc]}],
+ Cs = case Pc of
+ nomatch -> Cs0;
+ _ ->
+ [#iclause{anno=LAnno,
+ pats=[#c_cons{anno=LA,hd=Pc,tl=Tail}],
+ guard=Guardc,
+ body=Lps ++ [Lc]}|Cs0]
+ end,
+ Fun = #ifun{anno=LAnno,id=[],vars=[Arg],clauses=Cs,fc=Fc},
+ {#iletrec{anno=LAnno,defs=[{Name,Fun}],
+ body=Gps ++ [#iapply{anno=LAnno,
+ op=#c_fname{anno=LA,id=Name,arity=1},
+ args=[Gc]}]},
+ [],St10};
+lc_tq(Line, E, [Fil0|Qs0], More, St0) ->
+ %% Special-case sequences of guard tests.
+ LA = [Line],
+ LAnno = #a{anno=LA},
+ case is_guard_test(Fil0) of
+ true ->
+ {Gs0,Qs1} = splitwith(fun is_guard_test/1, Qs0),
+ {Lc,Lps,St1} = lc_tq(Line, E, Qs1, More, St0),
+ {Mc,Mps,St2} = expr(More, St1),
+ {Gs,St3} = lc_guard_tests([Fil0|Gs0], St2), %These are always flat!
+ {#icase{anno=LAnno,
+ args=[],
+ clauses=[#iclause{anno=LAnno,pats=[],
+ guard=Gs,body=Lps ++ [Lc]}],
+ fc=#iclause{anno=LAnno,pats=[],guard=[],body=Mps ++ [Mc]}},
+ [],St3};
+ false ->
+ {Lc,Lps,St1} = lc_tq(Line, E, Qs0, More, St0),
+ {Mc,Mps,St2} = expr(More, St1),
+ {Fpat,St3} = new_var(St2),
+ Fc = fail_clause([Fpat], #c_tuple{es=[#c_atom{val=case_clause},Fpat]}),
+ %% Do a little novars optimisation here.
+ case Fil0 of
+ {op,_,'not',Fil1} ->
+ {Filc,Fps,St4} = novars(Fil1, St3),
+ {#icase{anno=LAnno,
+ args=[Filc],
+ clauses=[#iclause{anno=LAnno,
+ pats=[#c_atom{anno=LA,val=true}],
+ guard=[],
+ body=Mps ++ [Mc]},
+ #iclause{anno=LAnno,
+ pats=[#c_atom{anno=LA,val=false}],
+ guard=[],
+ body=Lps ++ [Lc]}],
+ fc=Fc},
+ Fps,St4};
+ _Other ->
+ {Filc,Fps,St4} = novars(Fil0, St3),
+ {#icase{anno=LAnno,
+ args=[Filc],
+ clauses=[#iclause{anno=LAnno,
+ pats=[#c_atom{anno=LA,val=true}],
+ guard=[],
+ body=Lps ++ [Lc]},
+ #iclause{anno=LAnno,
+ pats=[#c_atom{anno=LA,val=false}],
+ guard=[],
+ body=Mps ++ [Mc]}],
+ fc=Fc},
+ Fps,St4}
+ end
+ end;
+lc_tq(Line, E, [], More, St) ->
+ expr({cons,Line,E,More}, St).
+
+lc_guard_tests([], St) -> {[],St};
+lc_guard_tests(Gs0, St) ->
+ Gs = guard_tests(Gs0),
+ gexpr_top(Gs, St).
+
+%% is_guard_test(Expression) -> true | false.
+%% Test if a general expression is a guard test. Use erl_lint here
+%% as it now allows sys_pre_expand transformed source.
+
+is_guard_test(E) -> erl_lint:is_guard_test(E).
+
+%% novars(Expr, State) -> {Novars,[PreExpr],State}.
+%% Generate a novars expression, basically a call or a safe. At this
+%% level we do not need to do a deep check.
+
+novars(E0, St0) ->
+ {E1,Eps,St1} = expr(E0, St0),
+ {Se,Sps,St2} = force_novars(E1, St1),
+ {Se,Eps ++ Sps,St2}.
+
+force_novars(#iapply{}=App, St) -> {App,[],St};
+force_novars(#icall{}=Call, St) -> {Call,[],St};
+force_novars(#iprimop{}=Prim, St) -> {Prim,[],St};
+force_novars(#ifun{}=Fun, St) -> {Fun,[],St}; %These are novars too
+force_novars(#ibinary{}=Bin, St) -> {Bin,[],St};
+force_novars(Ce, St) ->
+ force_safe(Ce, St).
+
+%% safe(Expr, State) -> {Safe,[PreExpr],State}.
+%% Generate an internal safe expression. These are simples without
+%% binaries which can fail. At this level we do not need to do a
+%% deep check. Must do special things with matches here.
+
+safe(E0, St0) ->
+ {E1,Eps,St1} = expr(E0, St0),
+ {Se,Sps,St2} = force_safe(E1, St1),
+ {Se,Eps ++ Sps,St2}.
+
+safe_list(Es, St) ->
+ foldr(fun (E, {Ces,Esp,St0}) ->
+ {Ce,Ep,St1} = safe(E, St0),
+ {[Ce|Ces],Ep ++ Esp,St1}
+ end, {[],[],St}, Es).
+
+force_safe(#imatch{anno=Anno,pat=P,arg=E,fc=Fc}, St0) ->
+ {Le,Lps,St1} = force_safe(E, St0),
+ {Le,Lps ++ [#imatch{anno=Anno,pat=P,arg=Le,fc=Fc}],St1};
+force_safe(Ce, St0) ->
+ case is_safe(Ce) of
+ true -> {Ce,[],St0};
+ false ->
+ {V,St1} = new_var(St0),
+ {V,[#iset{var=V,arg=Ce}],St1}
+ end.
+
+is_safe(#c_cons{}) -> true;
+is_safe(#c_tuple{}) -> true;
+is_safe(#c_var{}) -> true;
+is_safe(E) -> core_lib:is_atomic(E).
+
+%%% %% variable(Expr, State) -> {Variable,[PreExpr],State}.
+%%% %% force_variable(Expr, State) -> {Variable,[PreExpr],State}.
+%%% %% Generate a variable.
+
+%%% variable(E0, St0) ->
+%%% {E1,Eps,St1} = expr(E0, St0),
+%%% {V,Vps,St2} = force_variable(E1, St1),
+%%% {V,Eps ++ Vps,St2}.
+
+%%% force_variable(#c_var{}=Var, St) -> {Var,[],St};
+%%% force_variable(Ce, St0) ->
+%%% {V,St1} = new_var(St0),
+%%% {V,[#iset{var=V,arg=Ce}],St1}.
+
+%%% %% atomic(Expr, State) -> {Atomic,[PreExpr],State}.
+%%% %% force_atomic(Expr, State) -> {Atomic,[PreExpr],State}.
+
+%%% atomic(E0, St0) ->
+%%% {E1,Eps,St1} = expr(E0, St0),
+%%% {A,Aps,St2} = force_atomic(E1, St1),
+%%% {A,Eps ++ Aps,St2}.
+
+%%% force_atomic(Ce, St0) ->
+%%% case core_lib:is_atomic(Ce) of
+%%% true -> {Ce,[],St0};
+%%% false ->
+%%% {V,St1} = new_var(St0),
+%%% {V,[#iset{var=V,arg=Ce}],St1}
+%%% end.
+
+%% fold_match(MatchExpr, Pat) -> {MatchPat,Expr}.
+%% Fold nested matches into one match with aliased patterns.
+
+fold_match({match,L,P0,E0}, P) ->
+ {P1,E1} = fold_match(E0, P),
+ {{match,L,P0,P1},E1};
+fold_match(E, P) -> {P,E}.
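+
+%% A minimal sketch of fold_match/2 (illustrative values only, line
+%% numbers elided to 0; not called elsewhere): a nested match such as
+%% X = Y = 42 is folded into one aliased pattern and a single argument.
+fold_match_example() ->
+    E0 = {match,0,{var,0,'Y'},{integer,0,42}},
+    {{match,0,{var,0,'Y'},{var,0,'X'}},{integer,0,42}} =
+        fold_match(E0, {var,0,'X'}),
+    ok.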
+
+%% pattern(Pattern) -> CorePat.
+%% Transform a pattern, keeping line numbers as annotations. We also normalise
+%% aliases in patterns to standard form, {alias,Pat,[Var]}.
+
+pattern({var,L,V}) -> #c_var{anno=[L],name=V};
+pattern({char,L,C}) -> #c_char{anno=[L],val=C};
+pattern({integer,L,I}) -> #c_int{anno=[L],val=I};
+pattern({float,L,F}) -> #c_float{anno=[L],val=F};
+pattern({atom,L,A}) -> #c_atom{anno=[L],val=A};
+pattern({string,L,S}) -> #c_string{anno=[L],val=S};
+pattern({nil,L}) -> #c_nil{anno=[L]};
+pattern({cons,L,H,T}) ->
+ #c_cons{anno=[L],hd=pattern(H),tl=pattern(T)};
+pattern({tuple,L,Ps}) ->
+ #c_tuple{anno=[L],es=pattern_list(Ps)};
+pattern({bin,L,Ps}) ->
+ %% We don't create a #ibinary record here, since there is
+ %% no need to hold any used/new annotations in a pattern.
+ #c_binary{anno=[L],segments=pat_bin(Ps)};
+pattern({match,_,P1,P2}) ->
+ pat_alias(pattern(P1), pattern(P2)).
+
+%% pat_bin([BinElement]) -> [BinSeg].
+
+pat_bin(Ps) -> map(fun pat_segment/1, Ps).
+
+pat_segment({bin_element,_,Term,Size,[Type,{unit,Unit}|Flags]}) ->
+ #c_bitstr{val=pattern(Term),size=pattern(Size),
+ unit=core_lib:make_literal(Unit),
+ type=core_lib:make_literal(Type),
+ flags=core_lib:make_literal(Flags)}.
+
+%% pat_alias(CorePat, CorePat) -> AliasPat.
+%% Normalise aliases. Trap bad aliases by throwing 'nomatch'.
+
+pat_alias(#c_var{name=V1}, P2) -> #c_alias{var=#c_var{name=V1},pat=P2};
+pat_alias(P1, #c_var{name=V2}) -> #c_alias{var=#c_var{name=V2},pat=P1};
+pat_alias(#c_cons{}=Cons, #c_string{anno=A,val=[H|T]}=S) ->
+ pat_alias(Cons, #c_cons{anno=A,hd=#c_char{anno=A,val=H},
+ tl=S#c_string{val=T}});
+pat_alias(#c_string{anno=A,val=[H|T]}=S, #c_cons{}=Cons) ->
+ pat_alias(#c_cons{anno=A,hd=#c_char{anno=A,val=H},
+ tl=S#c_string{val=T}}, Cons);
+pat_alias(#c_nil{}=Nil, #c_string{val=[]}) ->
+ Nil;
+pat_alias(#c_string{val=[]}, #c_nil{}=Nil) ->
+ Nil;
+pat_alias(#c_cons{anno=A,hd=H1,tl=T1}, #c_cons{hd=H2,tl=T2}) ->
+ #c_cons{anno=A,hd=pat_alias(H1, H2),tl=pat_alias(T1, T2)};
+pat_alias(#c_tuple{es=Es1}, #c_tuple{es=Es2}) ->
+ #c_tuple{es=pat_alias_list(Es1, Es2)};
+pat_alias(#c_char{val=C}=Char, #c_int{val=C}) ->
+ Char;
+pat_alias(#c_int{val=C}, #c_char{val=C}=Char) ->
+ Char;
+pat_alias(#c_alias{var=V1,pat=P1},
+ #c_alias{var=V2,pat=P2}) ->
+ if V1 == V2 -> pat_alias(P1, P2);
+ true -> #c_alias{var=V1,pat=#c_alias{var=V2,pat=pat_alias(P1, P2)}}
+ end;
+pat_alias(#c_alias{var=V1,pat=P1}, P2) ->
+ #c_alias{var=V1,pat=pat_alias(P1, P2)};
+pat_alias(P1, #c_alias{var=V2,pat=P2}) ->
+ #c_alias{var=V2,pat=pat_alias(P1, P2)};
+pat_alias(P, P) -> P;
+pat_alias(_, _) -> throw(nomatch).
+
+%% pat_alias_list([A1], [A2]) -> [A].
+
+pat_alias_list([A1|A1s], [A2|A2s]) ->
+ [pat_alias(A1, A2)|pat_alias_list(A1s, A2s)];
+pat_alias_list([], []) -> [];
+pat_alias_list(_, _) -> throw(nomatch).
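+
+%% A minimal sketch of pat_alias/2 (illustrative values only; not
+%% called elsewhere): aliasing a variable with a pattern yields a
+%% normalised alias, while incompatible literals throw 'nomatch'.
+pat_alias_example() ->
+    #c_alias{var=#c_var{name='X'},pat=#c_int{val=42}} =
+        pat_alias(#c_var{name='X'}, #c_int{val=42}),
+    nomatch = (catch pat_alias(#c_int{val=1}, #c_int{val=2})),
+    ok.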
+
+%% pattern_list([P]) -> [P].
+
+pattern_list(Ps) -> map(fun pattern/1, Ps).
+
+%% first([A]) -> [A].
+%% last([A]) -> A.
+
+first([_]) -> [];
+first([H|T]) -> [H|first(T)].
+
+last([L]) -> L;
+last([_|T]) -> last(T).
+
+%% make_vars([Name]) -> [{Var,Name}].
+
+make_vars(Vs) -> [ #c_var{name=V} || V <- Vs ].
+
+%% new_fun_name(Type, State) -> {FunName,State}.
+
+new_fun_name(Type, #core{fcount=C}=St) ->
+ {list_to_atom(Type ++ "$^" ++ integer_to_list(C)),St#core{fcount=C+1}}.
+
+%% new_var_name(State) -> {VarName,State}.
+
+new_var_name(#core{vcount=C}=St) ->
+ {list_to_atom("cor" ++ integer_to_list(C)),St#core{vcount=C + 1}}.
+
+%% new_var(State) -> {{var,Name},State}.
+%% new_var(LineAnno, State) -> {{var,Name},State}.
+
+new_var(St) ->
+ new_var([], St).
+
+new_var(Anno, St0) ->
+ {New,St} = new_var_name(St0),
+ {#c_var{anno=Anno,name=New},St}.
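+
+%% A minimal sketch of new_var/1 (illustrative values only; not called
+%% elsewhere): generated names are cor0, cor1, ... taken from the
+%% vcount field of the state.
+new_var_example() ->
+    {#c_var{anno=[],name=cor7},St} = new_var(#core{vcount=7}),
+    8 = St#core.vcount,
+    ok.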
+
+%% new_vars(Count, State) -> {[Var],State}.
+%% new_vars(Anno, Count, State) -> {[Var],State}.
+%% Make Count new variables.
+
+new_vars(N, St) -> new_vars_1(N, [], St, []).
+new_vars(Anno, N, St) -> new_vars_1(N, Anno, St, []).
+
+new_vars_1(N, Anno, St0, Vs) when N > 0 ->
+ {V,St1} = new_var(Anno, St0),
+ new_vars_1(N-1, Anno, St1, [V|Vs]);
+new_vars_1(0, _, St, Vs) -> {Vs,St}.
+
+fail_clause(Pats, A) ->
+ #iclause{anno=#a{anno=[compiler_generated]},
+ pats=Pats,guard=[],
+ body=[#iprimop{anno=#a{},name=#c_atom{val=match_fail},args=[A]}]}.
+
+ubody(B, St) -> uexpr(B, [], St).
+
+%% uclauses([Lclause], [KnownVar], State) -> {[Lclause],State}.
+
+uclauses(Lcs, Ks, St0) ->
+ mapfoldl(fun (Lc, St) -> uclause(Lc, Ks, St) end, St0, Lcs).
+
+%% uclause(Lclause, [KnownVar], State) -> {Lclause,State}.
+
+uclause(Cl0, Ks, St0) ->
+ {Cl1,_Pvs,Used,New,St1} = uclause(Cl0, Ks, Ks, St0),
+ A0 = get_ianno(Cl1),
+ A = A0#a{us=Used,ns=New},
+ {Cl1#iclause{anno=A},St1}.
+
+uclause(#iclause{anno=Anno,pats=Ps0,guard=G0,body=B0}, Pks, Ks0, St0) ->
+ {Ps1,Pg,Pvs,Pus,St1} = upattern_list(Ps0, Pks, St0),
+ Pu = union(Pus, intersection(Pvs, Ks0)),
+ Pn = subtract(Pvs, Pu),
+ Ks1 = union(Pn, Ks0),
+ {G1,St2} = uguard(Pg, G0, Ks1, St1),
+ Gu = used_in_any(G1),
+ Gn = new_in_any(G1),
+ Ks2 = union(Gn, Ks1),
+ {B1,St3} = uexprs(B0, Ks2, St2),
+ Used = intersection(union([Pu,Gu,used_in_any(B1)]), Ks0),
+ New = union([Pn,Gn,new_in_any(B1)]),
+ {#iclause{anno=Anno,pats=Ps1,guard=G1,body=B1},Pvs,Used,New,St3}.
+
+%% uguard([Test], [Kexpr], [KnownVar], State) -> {[Kexpr],State}.
+%% Build a guard expression list by folding in the equality tests.
+
+uguard([], [], _, St) -> {[],St};
+uguard(Pg, [], Ks, St) ->
+ %% No guard, so fold together equality tests.
+ uguard(first(Pg), [last(Pg)], Ks, St);
+uguard(Pg, Gs0, Ks, St0) ->
+ %% Gs0 must contain at least one element here.
+ {Gs3,St5} = foldr(fun (T, {Gs1,St1}) ->
+ {L,St2} = new_var(St1),
+ {R,St3} = new_var(St2),
+ {[#iset{var=L,arg=T}] ++ first(Gs1) ++
+ [#iset{var=R,arg=last(Gs1)},
+ #icall{anno=#a{}, %Must have an #a{}
+ module=#c_atom{val=erlang},
+ name=#c_atom{val='and'},
+ args=[L,R]}],
+ St3}
+ end, {Gs0,St0}, Pg),
+ %%ok = io:fwrite("core ~w: ~p~n", [?LINE,Gs3]),
+ uexprs(Gs3, Ks, St5).
+
+%% uexprs([Kexpr], [KnownVar], State) -> {[Kexpr],State}.
+
+uexprs([#imatch{anno=A,pat=P0,arg=Arg,fc=Fc}|Les], Ks, St0) ->
+ %% Optimise the simple case of setting an unbound variable.
+ case upattern(P0, Ks, St0) of
+ {#c_var{},[],_Pvs,_Pus,_} ->
+ %% Throw our work away and just set to iset.
+ uexprs([#iset{var=P0,arg=Arg}|Les], Ks, St0);
+ _Other ->
+ %% Throw our work away and set to icase.
+ if
+ Les == [] ->
+ %% Need to explicitly return match "value", make
+ %% safe for efficiency.
+ {La,Lps,St1} = force_safe(Arg, St0),
+ Mc = #iclause{anno=A,pats=[P0],guard=[],body=[La]},
+ uexprs(Lps ++ [#icase{anno=A,
+ args=[La],clauses=[Mc],fc=Fc}], Ks, St1);
+ true ->
+ Mc = #iclause{anno=A,pats=[P0],guard=[],body=Les},
+ uexprs([#icase{anno=A,args=[Arg],
+ clauses=[Mc],fc=Fc}], Ks, St0)
+ end
+ end;
+uexprs([Le0|Les0], Ks, St0) ->
+ {Le1,St1} = uexpr(Le0, Ks, St0),
+ {Les1,St2} = uexprs(Les0, union((core_lib:get_anno(Le1))#a.ns, Ks), St1),
+ {[Le1|Les1],St2};
+uexprs([], _, St) -> {[],St}.
+
+uexpr(#iset{anno=A,var=V,arg=A0}, Ks, St0) ->
+ {A1,St1} = uexpr(A0, Ks, St0),
+ {#iset{anno=A#a{us=del_element(V#c_var.name, (core_lib:get_anno(A1))#a.us),
+ ns=add_element(V#c_var.name, (core_lib:get_anno(A1))#a.ns)},
+ var=V,arg=A1},St1};
+%% imatch done in uexprs.
+uexpr(#iletrec{anno=A,defs=Fs0,body=B0}, Ks, St0) ->
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,{Fs0,B0}]),
+ {Fs1,St1} = mapfoldl(fun ({Name,F0}, St0) ->
+ {F1,St1} = uexpr(F0, Ks, St0),
+ {{Name,F1},St1}
+ end, St0, Fs0),
+ {B1,St2} = uexprs(B0, Ks, St1),
+ Used = used_in_any(map(fun ({_,F}) -> F end, Fs1) ++ B1),
+ {#iletrec{anno=A#a{us=Used,ns=[]},defs=Fs1,body=B1},St2};
+uexpr(#icase{anno=A,args=As0,clauses=Cs0,fc=Fc0}, Ks, St0) ->
+ %% As0 will never generate new variables.
+ {As1,St1} = uexpr_list(As0, Ks, St0),
+ {Cs1,St2} = uclauses(Cs0, Ks, St1),
+ {Fc1,St3} = uclause(Fc0, Ks, St2),
+ Used = union(used_in_any(As1), used_in_any(Cs1)),
+ New = new_in_all(Cs1),
+ {#icase{anno=A#a{us=Used,ns=New},args=As1,clauses=Cs1,fc=Fc1},St3};
+uexpr(#ifun{anno=A,id=Id,vars=As,clauses=Cs0,fc=Fc0}, Ks0, St0) ->
+ Avs = lit_list_vars(As),
+ Ks1 = union(Avs, Ks0),
+ {Cs1,St1} = ufun_clauses(Cs0, Ks1, St0),
+ {Fc1,St2} = ufun_clause(Fc0, Ks1, St1),
+ Used = subtract(intersection(used_in_any(Cs1), Ks0), Avs),
+ {#ifun{anno=A#a{us=Used,ns=[]},id=Id,vars=As,clauses=Cs1,fc=Fc1},St2};
+uexpr(#iapply{anno=A,op=Op,args=As}, _, St) ->
+ Used = union(lit_vars(Op), lit_list_vars(As)),
+ {#iapply{anno=A#a{us=Used},op=Op,args=As},St};
+uexpr(#iprimop{anno=A,name=Name,args=As}, _, St) ->
+ Used = lit_list_vars(As),
+ {#iprimop{anno=A#a{us=Used},name=Name,args=As},St};
+uexpr(#icall{anno=A,module=Mod,name=Name,args=As}, _, St) ->
+ Used = union([lit_vars(Mod),lit_vars(Name),lit_list_vars(As)]),
+ {#icall{anno=A#a{us=Used},module=Mod,name=Name,args=As},St};
+uexpr(#itry{anno=A,args=As0,vars=Vs,body=Bs0,evars=Evs,handler=Hs0}, Ks, St0) ->
+ %% Note that we export only from body and exception.
+ {As1,St1} = uexprs(As0, Ks, St0),
+ {Bs1,St2} = uexprs(Bs0, Ks, St1),
+ {Hs1,St3} = uexprs(Hs0, Ks, St2),
+ Used = intersection(used_in_any(Bs1++Hs1++As1), Ks),
+ New = new_in_all(Bs1++Hs1),
+ {#itry{anno=A#a{us=Used,ns=New},
+ args=As1,vars=Vs,body=Bs1,evars=Evs,handler=Hs1},St3};
+uexpr(#icatch{anno=A,body=Es0}, Ks, St0) ->
+ {Es1,St1} = uexprs(Es0, Ks, St0),
+ {#icatch{anno=A#a{us=used_in_any(Es1)},body=Es1},St1};
+uexpr(#ireceive1{anno=A,clauses=Cs0}, Ks, St0) ->
+ {Cs1,St1} = uclauses(Cs0, Ks, St0),
+ {#ireceive1{anno=A#a{us=used_in_any(Cs1),ns=new_in_all(Cs1)},
+ clauses=Cs1},St1};
+uexpr(#ireceive2{anno=A,clauses=Cs0,timeout=Te0,action=Tes0}, Ks, St0) ->
+ %% Te0 will never generate new variables.
+ {Te1,St1} = uexpr(Te0, Ks, St0),
+ {Cs1,St2} = uclauses(Cs0, Ks, St1),
+ {Tes1,St3} = uexprs(Tes0, Ks, St2),
+ Used = union([used_in_any(Cs1),used_in_any(Tes1),
+ (core_lib:get_anno(Te1))#a.us]),
+ New = case Cs1 of
+ [] -> new_in_any(Tes1);
+ _ -> intersection(new_in_all(Cs1), new_in_any(Tes1))
+ end,
+ {#ireceive2{anno=A#a{us=Used,ns=New},
+ clauses=Cs1,timeout=Te1,action=Tes1},St3};
+uexpr(#iprotect{anno=A,body=Es0}, Ks, St0) ->
+ {Es1,St1} = uexprs(Es0, Ks, St0),
+ Used = used_in_any(Es1),
+ {#iprotect{anno=A#a{us=Used},body=Es1},St1}; %No new variables escape!
+uexpr(#ibinary{anno=A,segments=Ss}, _, St) ->
+ Used = bitstr_vars(Ss),
+ {#ibinary{anno=A#a{us=Used},segments=Ss},St};
+uexpr(Lit, _, St) ->
+ true = core_lib:is_simple(Lit), %Sanity check!
+ Vs = lit_vars(Lit),
+ Anno = core_lib:get_anno(Lit),
+ {core_lib:set_anno(Lit, #a{us=Vs,anno=Anno}),St}.
+
+uexpr_list(Les0, Ks, St0) ->
+ mapfoldl(fun (Le, St) -> uexpr(Le, Ks, St) end, St0, Les0).
+
+%% ufun_clauses([Lclause], [KnownVar], State) -> {[Lclause],State}.
+
+ufun_clauses(Lcs, Ks, St0) ->
+ mapfoldl(fun (Lc, St) -> ufun_clause(Lc, Ks, St) end, St0, Lcs).
+
+%% ufun_clause(Lclause, [KnownVar], State) -> {Lclause,State}.
+
+ufun_clause(Cl0, Ks, St0) ->
+ {Cl1,Pvs,Used,_,St1} = uclause(Cl0, [], Ks, St0),
+ A0 = get_ianno(Cl1),
+ A = A0#a{us=subtract(intersection(Used, Ks), Pvs),ns=[]},
+ {Cl1#iclause{anno=A},St1}.
+
+%% upattern(Pat, [KnownVar], State) ->
+%% {Pat,[GuardTest],[NewVar],[UsedVar],State}.
+
+upattern(#c_var{name='_'}, _, St0) ->
+ {New,St1} = new_var_name(St0),
+ {#c_var{name=New},[],[New],[],St1};
+upattern(#c_var{name=V}=Var, Ks, St0) ->
+ case is_element(V, Ks) of
+ true ->
+ {N,St1} = new_var_name(St0),
+ New = #c_var{name=N},
+ Test = #icall{anno=#a{us=add_element(N, [V])},
+ module=#c_atom{val=erlang},
+ name=#c_atom{val='=:='},
+ args=[New,Var]},
+ %% Test doesn't need protecting.
+ {New,[Test],[N],[],St1};
+ false -> {Var,[],[V],[],St0}
+ end;
+upattern(#c_cons{hd=H0,tl=T0}=Cons, Ks, St0) ->
+ {H1,Hg,Hv,Hu,St1} = upattern(H0, Ks, St0),
+ {T1,Tg,Tv,Tu,St2} = upattern(T0, union(Hv, Ks), St1),
+ {Cons#c_cons{hd=H1,tl=T1},Hg ++ Tg,union(Hv, Tv),union(Hu, Tu),St2};
+upattern(#c_tuple{es=Es0}=Tuple, Ks, St0) ->
+ {Es1,Esg,Esv,Eus,St1} = upattern_list(Es0, Ks, St0),
+ {Tuple#c_tuple{es=Es1},Esg,Esv,Eus,St1};
+upattern(#c_binary{segments=Es0}=Bin, Ks, St0) ->
+ {Es1,Esg,Esv,Eus,St1} = upat_bin(Es0, Ks, St0),
+ {Bin#c_binary{segments=Es1},Esg,Esv,Eus,St1};
+upattern(#c_alias{var=V0,pat=P0}=Alias, Ks, St0) ->
+ {V1,Vg,Vv,Vu,St1} = upattern(V0, Ks, St0),
+ {P1,Pg,Pv,Pu,St2} = upattern(P0, union(Vv, Ks), St1),
+ {Alias#c_alias{var=V1,pat=P1},Vg ++ Pg,union(Vv, Pv),union(Vu, Pu),St2};
+upattern(Other, _, St) -> {Other,[],[],[],St}. %Constants
+
+%% upattern_list([Pat], [KnownVar], State) ->
+%% {[Pat],[GuardTest],[NewVar],[UsedVar],State}.
+
+upattern_list([P0|Ps0], Ks, St0) ->
+ {P1,Pg,Pv,Pu,St1} = upattern(P0, Ks, St0),
+ {Ps1,Psg,Psv,Psu,St2} = upattern_list(Ps0, union(Pv, Ks), St1),
+ {[P1|Ps1],Pg ++ Psg,union(Pv, Psv),union(Pu, Psu),St2};
+upattern_list([], _, St) -> {[],[],[],[],St}.
+
+%% upat_bin([Pat], [KnownVar], State) ->
+%% {[Pat],[GuardTest],[NewVar],[UsedVar],State}.
+upat_bin(Es0, Ks, St0) ->
+ upat_bin(Es0, Ks, [], St0).
+
+%% upat_bin([Pat], [KnownVar], [LocalVar], State) ->
+%% {[Pat],[GuardTest],[NewVar],[UsedVar],State}.
+upat_bin([P0|Ps0], Ks, Bs, St0) ->
+ {P1,Pg,Pv,Pu,Bs1,St1} = upat_element(P0, Ks, Bs, St0),
+ {Ps1,Psg,Psv,Psu,St2} = upat_bin(Ps0, union(Pv, Ks), Bs1, St1),
+ {[P1|Ps1],Pg ++ Psg,union(Pv, Psv),union(Pu, Psu),St2};
+upat_bin([], _, _, St) -> {[],[],[],[],St}.
+
+
+%% upat_element(Segment, [KnownVar], [LocalVar], State) ->
+%% {Segment,[GuardTest],[NewVar],[UsedVar],[LocalVar],State}
+upat_element(#c_bitstr{val=H0,size=Sz}=Seg, Ks, Bs, St0) ->
+ {H1,Hg,Hv,[],St1} = upattern(H0, Ks, St0),
+ Bs1 = case H0 of
+ #c_var{name=Hname} ->
+ case H1 of
+ #c_var{name=Hname} ->
+ Bs;
+ #c_var{name=Other} ->
+ [{Hname, Other}|Bs]
+ end;
+ _ ->
+ Bs
+ end,
+ {Sz1, Us} = case Sz of
+ #c_var{name=Vname} ->
+ rename_bitstr_size(Vname, Bs);
+ _Other -> {Sz, []}
+ end,
+ {Seg#c_bitstr{val=H1, size=Sz1},Hg,Hv,Us,Bs1,St1}.
+
+rename_bitstr_size(V, [{V, N}|_]) ->
+ New = #c_var{name=N},
+ {New, [N]};
+rename_bitstr_size(V, [_|Rest]) ->
+ rename_bitstr_size(V, Rest);
+rename_bitstr_size(V, []) ->
+ Old = #c_var{name=V},
+ {Old, [V]}.
+
+used_in_any(Les) ->
+ foldl(fun (Le, Ns) -> union((core_lib:get_anno(Le))#a.us, Ns) end,
+ [], Les).
+
+new_in_any(Les) ->
+ foldl(fun (Le, Ns) -> union((core_lib:get_anno(Le))#a.ns, Ns) end,
+ [], Les).
+
+new_in_all([Le|Les]) ->
+ foldl(fun (L, Ns) -> intersection((core_lib:get_anno(L))#a.ns, Ns) end,
+ (core_lib:get_anno(Le))#a.ns, Les);
+new_in_all([]) -> [].
+
+%% The AfterVars are the variables which are used afterwards. We need
+%% this to work out which variables are actually exported and used
+%% from case/receive. In subblocks/clauses the AfterVars of the block
+%% are just the exported variables.
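+%%
+%% For example (source-level sketch, names assumed):
+%%     case E of a -> X = 1; b -> X = 2 end,
+%%     X + 1
+%% Here X is used after the case, so X must be exported from the case.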
+
+cbody(B0, St0) ->
+ {B1,_,_,St1} = cexpr(B0, [], St0),
+ {B1,St1}.
+
+%% cclause(Lclause, [AfterVar], State) -> {Cclause,State}.
+%% The AfterVars are the exported variables.
+
+cclause(#iclause{anno=#a{anno=Anno},pats=Ps,guard=G0,body=B0}, Exp, St0) ->
+ {B1,_Us1,St1} = cexprs(B0, Exp, St0),
+ {G1,St2} = cguard(G0, St1),
+ {#c_clause{anno=Anno,pats=Ps,guard=G1,body=B1},St2}.
+
+cclauses(Lcs, Es, St0) ->
+ mapfoldl(fun (Lc, St) -> cclause(Lc, Es, St) end, St0, Lcs).
+
+cguard([], St) -> {#c_atom{val=true},St};
+cguard(Gs, St0) ->
+ {G,_,St1} = cexprs(Gs, [], St0),
+ {G,St1}.
+
+%% cexprs([Lexpr], [AfterVar], State) -> {Cexpr,[AfterVar],State}.
+%% Must be sneaky here at the last expr when combining exports for the
+%% whole sequence and exports for that expr.
+
+cexprs([#iset{var=#c_var{name=Name}=Var}=Iset], As, St) ->
+ %% Make the return value explicit, and make Var a true top-level expression.
+ cexprs([Iset,Var#c_var{anno=#a{us=[Name]}}], As, St);
+cexprs([Le], As, St0) ->
+ {Ce,Es,Us,St1} = cexpr(Le, As, St0),
+ Exp = make_vars(As), %The export variables
+ if
+ Es == [] -> {core_lib:make_values([Ce|Exp]),union(Us, As),St1};
+ true ->
+ {R,St2} = new_var(St1),
+ {#c_let{anno=get_lineno_anno(Ce),
+ vars=[R|make_vars(Es)],arg=Ce,
+ body=core_lib:make_values([R|Exp])},
+ union(Us, As),St2}
+ end;
+cexprs([#iset{anno=#a{anno=A},var=V,arg=A0}|Les], As0, St0) ->
+ {Ces,As1,St1} = cexprs(Les, As0, St0),
+ {A1,Es,Us,St2} = cexpr(A0, As1, St1),
+ {#c_let{anno=A,vars=[V|make_vars(Es)],arg=A1,body=Ces},
+ union(Us, As1),St2};
+cexprs([Le|Les], As0, St0) ->
+ {Ces,As1,St1} = cexprs(Les, As0, St0),
+ {Ce,Es,Us,St2} = cexpr(Le, As1, St1),
+ if
+ Es == [] ->
+ {#c_seq{arg=Ce,body=Ces},union(Us, As1),St2};
+ true ->
+ {R,St3} = new_var(St2),
+ {#c_let{vars=[R|make_vars(Es)],arg=Ce,body=Ces},
+ union(Us, As1),St3}
+ end.
+
+%% cexpr(Lexpr, [AfterVar], State) -> {Cexpr,[ExpVar],[UsedVar],State}.
+
+cexpr(#iletrec{anno=A,defs=Fs0,body=B0}, As, St0) ->
+ {Fs1,{_,St1}} = mapfoldl(fun ({Name,F0}, {Used,St0}) ->
+ {F1,[],Us,St1} = cexpr(F0, [], St0),
+ {#c_def{name=#c_fname{id=Name,arity=1},
+ val=F1},
+ {union(Us, Used),St1}}
+ end, {[],St0}, Fs0),
+ Exp = intersection(A#a.ns, As),
+ {B1,_Us,St2} = cexprs(B0, Exp, St1),
+ {#c_letrec{anno=A#a.anno,defs=Fs1,body=B1},Exp,A#a.us,St2};
+cexpr(#icase{anno=A,args=Largs,clauses=Lcs,fc=Lfc}, As, St0) ->
+ Exp = intersection(A#a.ns, As), %Exports
+ {Cargs,St1} = foldr(fun (La, {Cas,Sta}) ->
+ {Ca,[],_Us1,Stb} = cexpr(La, As, Sta),
+ {[Ca|Cas],Stb}
+ end, {[],St0}, Largs),
+ {Ccs,St2} = cclauses(Lcs, Exp, St1),
+ {Cfc,St3} = cclause(Lfc, [], St2), %Never exports
+ {#c_case{anno=A#a.anno,
+ arg=core_lib:make_values(Cargs),clauses=Ccs ++ [Cfc]},
+ Exp,A#a.us,St3};
+cexpr(#ireceive1{anno=A,clauses=Lcs}, As, St0) ->
+ Exp = intersection(A#a.ns, As), %Exports
+ {Ccs,St1} = cclauses(Lcs, Exp, St0),
+ {#c_receive{anno=A#a.anno,
+ clauses=Ccs,
+ timeout=#c_atom{val=infinity},action=#c_atom{val=true}},
+ Exp,A#a.us,St1};
+cexpr(#ireceive2{anno=A,clauses=Lcs,timeout=Lto,action=Les}, As, St0) ->
+ Exp = intersection(A#a.ns, As), %Exports
+ {Cto,[],_Us1,St1} = cexpr(Lto, As, St0),
+ {Ccs,St2} = cclauses(Lcs, Exp, St1),
+ {Ces,_Us2,St3} = cexprs(Les, Exp, St2),
+ {#c_receive{anno=A#a.anno,
+ clauses=Ccs,timeout=Cto,action=Ces},
+ Exp,A#a.us,St3};
+cexpr(#itry{anno=A,args=La,vars=Vs,body=Lb,evars=Evs,handler=Lh}, As, St0) ->
+ Exp = intersection(A#a.ns, As), %Exports
+ {Ca,_Us1,St1} = cexprs(La, [], St0),
+ {Cb,_Us2,St2} = cexprs(Lb, Exp, St1),
+ {Ch,_Us3,St3} = cexprs(Lh, Exp, St2),
+ {#c_try{anno=A#a.anno,arg=Ca,vars=Vs,body=Cb,evars=Evs,handler=Ch},
+ Exp,A#a.us,St3};
+cexpr(#icatch{anno=A,body=Les}, _As, St0) ->
+ {Ces,_Us1,St1} = cexprs(Les, [], St0), %Never export!
+ {#c_catch{body=Ces},[],A#a.us,St1};
+cexpr(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) ->
+ {Ccs,St1} = cclauses(Lcs, [], St0), %NEVER export!
+ {Cfc,St2} = cclause(Lfc, [], St1),
+ Anno = A#a.anno,
+ {#c_fun{anno=Id++Anno,vars=Args,
+ body=#c_case{anno=Anno,
+ arg=core_lib:set_anno(core_lib:make_values(Args), Anno),
+ clauses=Ccs ++ [Cfc]}},
+ [],A#a.us,St2};
+cexpr(#iapply{anno=A,op=Op,args=Args}, _As, St) ->
+ {#c_apply{anno=A#a.anno,op=Op,args=Args},[],A#a.us,St};
+cexpr(#icall{anno=A,module=Mod,name=Name,args=Args}, _As, St) ->
+ {#c_call{anno=A#a.anno,module=Mod,name=Name,args=Args},[],A#a.us,St};
+cexpr(#iprimop{anno=A,name=Name,args=Args}, _As, St) ->
+ {#c_primop{anno=A#a.anno,name=Name,args=Args},[],A#a.us,St};
+cexpr(#iprotect{anno=A,body=Es}, _As, St0) ->
+ {Ce,_,St1} = cexprs(Es, [], St0),
+ V = #c_var{name='Try'}, %The names are arbitrary
+ Vs = [#c_var{name='T'},#c_var{name='R'}],
+ {#c_try{anno=A#a.anno,arg=Ce,vars=[V],body=V,
+ evars=Vs,handler=#c_atom{val=false}},
+ [],A#a.us,St1};
+cexpr(#ibinary{anno=#a{anno=Anno,us=Us},segments=Segs}, _As, St) ->
+ {#c_binary{anno=Anno,segments=Segs},[],Us,St};
+cexpr(Lit, _As, St) ->
+ true = core_lib:is_simple(Lit), %Sanity check!
+ Anno = core_lib:get_anno(Lit),
+ Vs = Anno#a.us,
+ %%Vs = lit_vars(Lit),
+ {core_lib:set_anno(Lit, Anno#a.anno),[],Vs,St}.
+
+%% lit_vars(Literal) -> [Var].
+
+lit_vars(Lit) -> lit_vars(Lit, []).
+
+lit_vars(#c_cons{hd=H,tl=T}, Vs) -> lit_vars(H, lit_vars(T, Vs));
+lit_vars(#c_tuple{es=Es}, Vs) -> lit_list_vars(Es, Vs);
+lit_vars(#c_var{name=V}, Vs) -> add_element(V, Vs);
+lit_vars(_, Vs) -> Vs. %These are atomic
+
+% lit_bin_vars(Segs, Vs) ->
+% foldl(fun (#c_bitstr{val=V,size=S}, Vs0) ->
+% lit_vars(V, lit_vars(S, Vs0))
+% end, Vs, Segs).
+
+lit_list_vars(Ls) -> lit_list_vars(Ls, []).
+
+lit_list_vars(Ls, Vs) ->
+ foldl(fun (L, Vs0) -> lit_vars(L, Vs0) end, Vs, Ls).
+
+bitstr_vars(Segs) ->
+ bitstr_vars(Segs, []).
+
+bitstr_vars(Segs, Vs) ->
+ foldl(fun (#c_bitstr{val=V,size=S}, Vs0) ->
+ lit_vars(V, lit_vars(S, Vs0))
+ end, Vs, Segs).
+
+get_ianno(Ce) ->
+ case core_lib:get_anno(Ce) of
+ #a{}=A -> A;
+ A when is_list(A) -> #a{anno=A}
+ end.
+
+get_lineno_anno(Ce) ->
+ case core_lib:get_anno(Ce) of
+ #a{anno=A} -> A;
+ A when is_list(A) -> A
+ end.
+
+
+%%%
+%%% Handling of warnings.
+%%%
+
+format_error(nomatch) -> "pattern cannot possibly match".
+
+add_warning(Line, Term, #core{ws=Ws}=St) when Line >= 0 ->
+ St#core{ws=[{Line,?MODULE,Term}|Ws]};
+add_warning(_, _, St) -> St.
+
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.erl
new file mode 100644
index 0000000000..2d600fabc4
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.erl
@@ -0,0 +1,1568 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_kernel.erl,v 1.3 2010/03/04 13:54:20 maria Exp $
+%%
+%% Purpose : Transform Core Erlang to Kernel Erlang
+
+%% Kernel Erlang is like Core Erlang with a few significant
+%% differences:
+%%
+%% 1. It is flat! There are no nested calls or sub-blocks.
+%%
+%% 2. All variables are unique in a function. There is no scoping, or
+%% rather the scope is the whole function.
+%%
+%% 3. Pattern matching (in cases and receives) has been compiled.
+%%
+%% 4. The annotations contain variable usages. Since we have to work
+%% this out anyway for funs, we might as well pass it on for free to
+%% later passes.
+%%
+%% 5. All remote-calls are to statically named m:f/a. Meta-calls are
+%% passed via erlang:apply/3.
+%%
+%% The translation is done in two passes:
+%%
+%% 1. Basic translation, translate variable/function names, flatten
+%% completely, pattern matching compilation.
+%%
+%% 2. Fun-lifting (lambda-lifting), variable usage annotation and
+%% last-call handling.
+%%
+%% All new Kexprs are created in the first pass, they are just
+%% annotated in the second.
+%%
+%% Functions and BIFs
+%%
+%% Functions are "call"ed, or "enter"ed if it is a last call; their
+%% return values may be ignored. BIFs are operations the compiler
+%% knows to be internal; they can only be called, and their return
+%% values cannot be ignored.
+%%
+%% Letrec's are handled rather naively. All the functions in one
+%% letrec are handled as one block to find the free variables. While
+%% this is not optimal, it reflects how letrec's are often used. We
+%% don't have to worry about variable shadowing and nested letrec's as
+%% this is handled in the variable/function name translation. There
+%% is a little bit of trickery to ensure letrec transformations fit
+%% into the scheme of things.
+%%
+%% To ensure unique variable names we use a variable substitution
+%% table and keep the set of all defined variables. The nested
+%% scoping of Core means that we must also nest the substitution
+%% tables, but the defined set must be passed through to match the
+%% flat structure of Kernel and to make sure variables with the same
+%% name from different scopes get different substitutions.
+%%
+%% We also use these substitutions to handle the variable renaming
+%% necessary in pattern matching compilation.
+%%
+%% The pattern matching compilation assumes that the values of
+%% different types don't overlap. This means that as there is no
+%% character type yet in the machine all characters must be converted
+%% to integers!
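+%%
+%% As a rough illustration of the flattening (a sketch, not the real
+%% Kernel record output): a nested Core expression such as
+%%     {foo(X)}
+%% has the call lifted out and bound to a fresh variable, conceptually
+%%     ker0 = foo(X),
+%%     {ker0}
+%% where ker0 is a variable name invented by the translator.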
+
+-module(v3_kernel).
+
+-export([module/2,format_error/1]).
+
+-import(lists, [map/2,foldl/3,foldr/3,mapfoldl/3,splitwith/2,
+ member/2,reverse/1,reverse/2]).
+-import(ordsets, [add_element/2,del_element/2,union/2,union/1,subtract/2]).
+
+-include("core_parse.hrl").
+-include("v3_kernel.hrl").
+
+%% These are not defined in v3_kernel.hrl.
+get_kanno(Kthing) -> element(2, Kthing).
+set_kanno(Kthing, Anno) -> setelement(2, Kthing, Anno).
+
+%% Internal kernel expressions and help functions.
+%% N.B. the annotation field is ALWAYS the first field!
+
+-record(ivalues, {anno=[],args}).
+-record(ifun, {anno=[],vars,body}).
+-record(iset, {anno=[],vars,arg,body}).
+-record(iletrec, {anno=[],defs}).
+-record(ialias, {anno=[],vars,pat}).
+-record(iclause, {anno=[],sub,pats,guard,body}).
+-record(ireceive_accept, {anno=[],arg}).
+-record(ireceive_next, {anno=[],arg}).
+
+%% State record for kernel translator.
+-record(kern, {func, %Current function
+ vcount=0, %Variable counter
+ fcount=0, %Fun counter
+ ds=[], %Defined variables
+ funs=[], %Fun functions
+ free=[], %Free variables
+ ws=[], %Warnings.
+ extinstr=false}). %Generate extended instructions
+
+module(#c_module{anno=A,name=M,exports=Es,attrs=As,defs=Fs}, Options) ->
+ ExtInstr = not member(no_new_apply, Options),
+ {Kfs,St} = mapfoldl(fun function/2, #kern{extinstr=ExtInstr}, Fs),
+ Kes = map(fun (#c_fname{id=N,arity=Ar}) -> {N,Ar} end, Es),
+ Kas = map(fun (#c_def{name=#c_atom{val=N},val=V}) ->
+ {N,core_lib:literal_value(V)} end, As),
+ {ok,#k_mdef{anno=A,name=M#c_atom.val,exports=Kes,attributes=Kas,
+ body=Kfs ++ St#kern.funs},St#kern.ws}.
+
+function(#c_def{anno=Af,name=#c_fname{id=F,arity=Arity},val=Body}, St0) ->
+ %%ok = io:fwrite("kern: ~p~n", [{F,Arity}]),
+ St1 = St0#kern{func={F,Arity},vcount=0,fcount=0,ds=sets:new()},
+ {#ifun{anno=Ab,vars=Kvs,body=B0},[],St2} = expr(Body, new_sub(), St1),
+ {B1,_,St3} = ubody(B0, return, St2),
+ %%B1 = B0, St3 = St2, %Null second pass
+ {#k_fdef{anno=#k{us=[],ns=[],a=Af ++ Ab},
+ func=F,arity=Arity,vars=Kvs,body=B1},St3}.
+
+%% body(Cexpr, Sub, State) -> {Kexpr,[PreKexpr],State}.
+%% Do the main sequence of a body. A body ends in an atomic value or
+%% values. We must check for a values vector first and otherwise fall through to expr.
+
+body(#c_values{anno=A,es=Ces}, Sub, St0) ->
+ %% Do this here even if only in bodies.
+ {Kes,Pe,St1} = atomic_list(Ces, Sub, St0),
+ %%{Kes,Pe,St1} = expr_list(Ces, Sub, St0),
+ {#ivalues{anno=A,args=Kes},Pe,St1};
+body(#ireceive_next{anno=A}, _, St) ->
+ {#k_receive_next{anno=A},[],St};
+body(Ce, Sub, St0) ->
+ expr(Ce, Sub, St0).
+
+%% guard(Cexpr, Sub, State) -> {Kexpr,State}.
+%% We handle guards almost as bodies. The only special thing we
+%% must do is to make the final Kexpr a #k_test{}.
+%% Also, we wrap the entire guard in a try/catch which is
+%% not strictly needed, but makes sure that every 'bif' instruction
+%% will get a proper failure label.
+
+guard(G0, Sub, St0) ->
+ {G1,St1} = wrap_guard(G0, St0),
+ {Ge0,Pre,St2} = expr(G1, Sub, St1),
+ {Ge,St} = gexpr_test(Ge0, St2),
+ {pre_seq(Pre, Ge),St}.
+
+%% Wrap the entire guard in a try/catch if needed.
+
+wrap_guard(#c_try{}=Try, St) -> {Try,St};
+wrap_guard(Core, St0) ->
+ {VarName,St} = new_var_name(St0),
+ Var = #c_var{name=VarName},
+ Try = #c_try{arg=Core,vars=[Var],body=Var,evars=[],handler=#c_atom{val=false}},
+ {Try,St}.
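+
+%% Conceptually (source-level sketch, not the literal Core output), a
+%% guard such as hd(X) == 1 is wrapped as
+%%     try hd(X) == 1 of V -> V catch _:_ -> false end
+%% so a failing BIF simply makes the guard evaluate to false.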
+
+%% gexpr_test(Kexpr, State) -> {Kexpr,State}.
+%% Builds the final boolean test from the last Kexpr in a guard test.
+%% Must enter try blocks and isets and find the last Kexpr in them.
+%% This must end in a recognised BEAM test!
+
+gexpr_test(#k_bif{anno=A,op=#k_remote{mod=#k_atom{val=erlang},
+ name=#k_atom{val=is_boolean},arity=1}=Op,
+ args=Kargs}, St) ->
+ %% XXX Remove this clause in R11. For bootstrap purposes, we must
+ %% recognize erlang:is_boolean/1 here.
+ {#k_test{anno=A,op=Op,args=Kargs},St};
+gexpr_test(#k_bif{anno=A,op=#k_remote{mod=#k_atom{val=erlang},
+ name=#k_atom{val=internal_is_record},arity=3}=Op,
+ args=Kargs}, St) ->
+ {#k_test{anno=A,op=Op,args=Kargs},St};
+gexpr_test(#k_bif{anno=A,op=#k_remote{mod=#k_atom{val=erlang},
+ name=#k_atom{val=F},arity=Ar}=Op,
+ args=Kargs}=Ke, St) ->
+ %% Either convert to test if ok, or add test.
+ %% At this stage, erlang:float/1 is not a type test. (It should
+ %% have been converted to erlang:is_float/1.)
+ case erl_internal:new_type_test(F, Ar) orelse
+ erl_internal:comp_op(F, Ar) of
+ true -> {#k_test{anno=A,op=Op,args=Kargs},St};
+ false -> gexpr_test_add(Ke, St) %Add equality test
+ end;
+gexpr_test(#k_try{arg=B0,vars=[#k_var{name=X}],body=#k_var{name=X},
+ handler=#k_atom{val=false}}=Try, St0) ->
+ {B,St} = gexpr_test(B0, St0),
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,{B0,B}]),
+ {Try#k_try{arg=B},St};
+gexpr_test(#iset{body=B0}=Iset, St0) ->
+ {B1,St1} = gexpr_test(B0, St0),
+ {Iset#iset{body=B1},St1};
+gexpr_test(Ke, St) -> gexpr_test_add(Ke, St). %Add equality test
+
+gexpr_test_add(Ke, St0) ->
+ Test = #k_remote{mod=#k_atom{val='erlang'},
+ name=#k_atom{val='=:='},
+ arity=2},
+ {Ae,Ap,St1} = force_atomic(Ke, St0),
+ {pre_seq(Ap, #k_test{anno=get_kanno(Ke),
+ op=Test,args=[Ae,#k_atom{val='true'}]}),St1}.
+
+%% expr(Cexpr, Sub, State) -> {Kexpr,[PreKexpr],State}.
+%% Convert a Core expression, flattening it at the same time.
+
+expr(#c_var{anno=A,name=V}, Sub, St) ->
+ {#k_var{anno=A,name=get_vsub(V, Sub)},[],St};
+expr(#c_char{anno=A,val=C}, _Sub, St) ->
+ {#k_int{anno=A,val=C},[],St}; %Convert to integers!
+expr(#c_int{anno=A,val=I}, _Sub, St) ->
+ {#k_int{anno=A,val=I},[],St};
+expr(#c_float{anno=A,val=F}, _Sub, St) ->
+ {#k_float{anno=A,val=F},[],St};
+expr(#c_atom{anno=A,val=At}, _Sub, St) ->
+ {#k_atom{anno=A,val=At},[],St};
+expr(#c_string{anno=A,val=S}, _Sub, St) ->
+ {#k_string{anno=A,val=S},[],St};
+expr(#c_nil{anno=A}, _Sub, St) ->
+ {#k_nil{anno=A},[],St};
+expr(#c_cons{anno=A,hd=Ch,tl=Ct}, Sub, St0) ->
+ %% Do cons in two steps, first the expressions left to right, then
+ %% any remaining literals right to left.
+ {Kh0,Hp0,St1} = expr(Ch, Sub, St0),
+ {Kt0,Tp0,St2} = expr(Ct, Sub, St1),
+ {Kt1,Tp1,St3} = force_atomic(Kt0, St2),
+ {Kh1,Hp1,St4} = force_atomic(Kh0, St3),
+ {#k_cons{anno=A,hd=Kh1,tl=Kt1},Hp0 ++ Tp0 ++ Tp1 ++ Hp1,St4};
+expr(#c_tuple{anno=A,es=Ces}, Sub, St0) ->
+ {Kes,Ep,St1} = atomic_list(Ces, Sub, St0),
+ {#k_tuple{anno=A,es=Kes},Ep,St1};
+expr(#c_binary{anno=A,segments=Cv}, Sub, St0) ->
+ case catch atomic_bin(Cv, Sub, St0, 0) of
+ {'EXIT',R} -> exit(R);
+ bad_element_size ->
+ Erl = #c_atom{val=erlang},
+ Name = #c_atom{val=error},
+ Args = [#c_atom{val=badarg}],
+ Fault = #c_call{module=Erl,name=Name,args=Args},
+ expr(Fault, Sub, St0);
+ {Kv,Ep,St1} ->
+ {#k_binary{anno=A,segs=Kv},Ep,St1}
+ end;
+expr(#c_fname{anno=A,arity=Ar}=Fname, Sub, St) ->
+ %% A local in an expression.
+ %% For now, these are wrapped into a fun by reverse
+ %% eta-conversion, but really, there should be exactly one
+ %% such "lambda function" for each escaping local name,
+ %% instead of one for each occurrence as done now.
+ Vs = [#c_var{name=list_to_atom("V" ++ integer_to_list(V))} ||
+ V <- integers(1, Ar)],
+ Fun = #c_fun{anno=A,vars=Vs,body=#c_apply{op=Fname,args=Vs}},
+ expr(Fun, Sub, St);
+expr(#c_fun{anno=A,vars=Cvs,body=Cb}, Sub0, St0) ->
+ {Kvs,Sub1,St1} = pattern_list(Cvs, Sub0, St0),
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,{{Cvs,Sub0,St0},{Kvs,Sub1,St1}}]),
+ {Kb,Pb,St2} = body(Cb, Sub1, St1),
+ {#ifun{anno=A,vars=Kvs,body=pre_seq(Pb, Kb)},[],St2};
+expr(#c_seq{arg=Ca,body=Cb}, Sub, St0) ->
+ {Ka,Pa,St1} = body(Ca, Sub, St0),
+ case is_exit_expr(Ka) of
+ true -> {Ka,Pa,St1};
+ false ->
+ {Kb,Pb,St2} = body(Cb, Sub, St1),
+ {Kb,Pa ++ [Ka] ++ Pb,St2}
+ end;
+expr(#c_let{anno=A,vars=Cvs,arg=Ca,body=Cb}, Sub0, St0) ->
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,{Cvs,Sub0,St0}]),
+ {Ka,Pa,St1} = body(Ca, Sub0, St0),
+ case is_exit_expr(Ka) of
+ true -> {Ka,Pa,St1};
+ false ->
+ {Kps,Sub1,St2} = pattern_list(Cvs, Sub0, St1),
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,{Kps,Sub1,St1,St2}]),
+ %% Break known multiple values into separate sets.
+ Sets = case Ka of
+ #ivalues{args=Kas} ->
+ foldr2(fun (V, Val, Sb) ->
+ [#iset{vars=[V],arg=Val}|Sb] end,
+ [], Kps, Kas);
+ _Other ->
+ [#iset{anno=A,vars=Kps,arg=Ka}]
+ end,
+ {Kb,Pb,St3} = body(Cb, Sub1, St2),
+ {Kb,Pa ++ Sets ++ Pb,St3}
+ end;
+expr(#c_letrec{anno=A,defs=Cfs,body=Cb}, Sub0, St0) ->
+ %% Make new function names and store substitution.
+ {Fs0,{Sub1,St1}} =
+ mapfoldl(fun (#c_def{name=#c_fname{id=F,arity=Ar},val=B}, {Sub,St0}) ->
+ {N,St1} = new_fun_name(atom_to_list(F)
+ ++ "/" ++
+ integer_to_list(Ar),
+ St0),
+ {{N,B},{set_fsub(F, Ar, N, Sub),St1}}
+ end, {Sub0,St0}, Cfs),
+ %% Run translation on functions and body.
+ {Fs1,St2} = mapfoldl(fun ({N,Fd0}, St1) ->
+ {Fd1,[],St2} = expr(Fd0, Sub1, St1),
+ Fd = set_kanno(Fd1, A),
+ {{N,Fd},St2}
+ end, St1, Fs0),
+ {Kb,Pb,St3} = body(Cb, Sub1, St2),
+ {Kb,[#iletrec{anno=A,defs=Fs1}|Pb],St3};
+expr(#c_case{arg=Ca,clauses=Ccs}, Sub, St0) ->
+ {Ka,Pa,St1} = body(Ca, Sub, St0), %This is a body!
+ {Kvs,Pv,St2} = match_vars(Ka, St1), %Must have variables here!
+ {Km,St3} = kmatch(Kvs, Ccs, Sub, St2),
+ Match = flatten_seq(build_match(Kvs, Km)),
+ {last(Match),Pa ++ Pv ++ first(Match),St3};
+expr(#c_receive{anno=A,clauses=Ccs0,timeout=Ce,action=Ca}, Sub, St0) ->
+ {Ke,Pe,St1} = atomic_lit(Ce, Sub, St0), %Force this to be atomic!
+ {Rvar,St2} = new_var(St1),
+ %% Need to massage accept clauses and add reject clause before matching.
+ Ccs1 = map(fun (#c_clause{anno=Banno,body=B0}=C) ->
+ B1 = #c_seq{arg=#ireceive_accept{anno=A},body=B0},
+ C#c_clause{anno=Banno,body=B1}
+ end, Ccs0),
+ {Mpat,St3} = new_var_name(St2),
+ Rc = #c_clause{anno=[compiler_generated|A],
+ pats=[#c_var{name=Mpat}],guard=#c_atom{anno=A,val=true},
+ body=#ireceive_next{anno=A}},
+ {Km,St4} = kmatch([Rvar], Ccs1 ++ [Rc], Sub, add_var_def(Rvar, St3)),
+ {Ka,Pa,St5} = body(Ca, Sub, St4),
+ {#k_receive{anno=A,var=Rvar,body=Km,timeout=Ke,action=pre_seq(Pa, Ka)},
+ Pe,St5};
+expr(#c_apply{anno=A,op=Cop,args=Cargs}, Sub, St) ->
+ c_apply(A, Cop, Cargs, Sub, St);
+expr(#c_call{anno=A,module=M0,name=F0,args=Cargs}, Sub, St0) ->
+ {[M1,F1|Kargs],Ap,St1} = atomic_list([M0,F0|Cargs], Sub, St0),
+ Ar = length(Cargs),
+ case {M1,F1} of
+ {#k_atom{val=Ma},#k_atom{val=Fa}} ->
+ Call = case is_remote_bif(Ma, Fa, Ar) of
+ true ->
+ #k_bif{anno=A,
+ op=#k_remote{mod=M1,name=F1,arity=Ar},
+ args=Kargs};
+ false ->
+ #k_call{anno=A,
+ op=#k_remote{mod=M1,name=F1,arity=Ar},
+ args=Kargs}
+ end,
+ {Call,Ap,St1};
+ _Other when St0#kern.extinstr == false -> %Old explicit apply
+ Call = #c_call{anno=A,
+ module=#c_atom{val=erlang},
+ name=#c_atom{val=apply},
+ args=[M0,F0,make_list(Cargs)]},
+ expr(Call, Sub, St0);
+ _Other -> %New instruction in R10.
+ Call = #k_call{anno=A,
+ op=#k_remote{mod=M1,name=F1,arity=Ar},
+ args=Kargs},
+ {Call,Ap,St1}
+ end;
+expr(#c_primop{anno=A,name=#c_atom{val=match_fail},args=Cargs}, Sub, St0) ->
+ %% This special case will disappear.
+ {Kargs,Ap,St1} = atomic_list(Cargs, Sub, St0),
+ Ar = length(Cargs),
+ Call = #k_call{anno=A,op=#k_internal{name=match_fail,arity=Ar},args=Kargs},
+ {Call,Ap,St1};
+expr(#c_primop{anno=A,name=#c_atom{val=N},args=Cargs}, Sub, St0) ->
+ {Kargs,Ap,St1} = atomic_list(Cargs, Sub, St0),
+ Ar = length(Cargs),
+ {#k_bif{anno=A,op=#k_internal{name=N,arity=Ar},args=Kargs},Ap,St1};
+expr(#c_try{anno=A,arg=Ca,vars=Cvs,body=Cb,evars=Evs,handler=Ch}, Sub0, St0) ->
+ %% The normal try expression. The body and exception handler
+ %% variables behave as let variables.
+ {Ka,Pa,St1} = body(Ca, Sub0, St0),
+ {Kcvs,Sub1,St2} = pattern_list(Cvs, Sub0, St1),
+ {Kb,Pb,St3} = body(Cb, Sub1, St2),
+ {Kevs,Sub2,St4} = pattern_list(Evs, Sub0, St3),
+ {Kh,Ph,St5} = body(Ch, Sub2, St4),
+ {#k_try{anno=A,arg=pre_seq(Pa, Ka),
+ vars=Kcvs,body=pre_seq(Pb, Kb),
+ evars=Kevs,handler=pre_seq(Ph, Kh)},[],St5};
+expr(#c_catch{anno=A,body=Cb}, Sub, St0) ->
+ {Kb,Pb,St1} = body(Cb, Sub, St0),
+ {#k_catch{anno=A,body=pre_seq(Pb, Kb)},[],St1};
+%% Handle internal expressions.
+expr(#ireceive_accept{anno=A}, _Sub, St) -> {#k_receive_accept{anno=A},[],St}.
+
+%% expr_list([Cexpr], Sub, State) -> {[Kexpr],[PreKexpr],State}.
+
+% expr_list(Ces, Sub, St) ->
+% foldr(fun (Ce, {Kes,Esp,St0}) ->
+% {Ke,Ep,St1} = expr(Ce, Sub, St0),
+% {[Ke|Kes],Ep ++ Esp,St1}
+% end, {[],[],St}, Ces).
+
+%% match_vars(Kexpr, State) -> {[Kvar],[PreKexpr],State}.
+%% Force return from body into a list of variables.
+
+match_vars(#ivalues{args=As}, St) ->
+ foldr(fun (Ka, {Vs,Vsp,St0}) ->
+ {V,Vp,St1} = force_variable(Ka, St0),
+ {[V|Vs],Vp ++ Vsp,St1}
+ end, {[],[],St}, As);
+match_vars(Ka, St0) ->
+ {V,Vp,St1} = force_variable(Ka, St0),
+ {[V],Vp,St1}.
+
+%% c_apply(A, Op, [Carg], Sub, State) -> {Kexpr,[PreKexpr],State}.
+%% Transform application, detect which are guaranteed to be bifs.
+
+c_apply(A, #c_fname{anno=Ra,id=F0,arity=Ar}, Cargs, Sub, St0) ->
+ {Kargs,Ap,St1} = atomic_list(Cargs, Sub, St0),
+ F1 = get_fsub(F0, Ar, Sub), %Has it been rewritten?
+ {#k_call{anno=A,op=#k_local{anno=Ra,name=F1,arity=Ar},args=Kargs},
+ Ap,St1};
+c_apply(A, Cop, Cargs, Sub, St0) ->
+ {Kop,Op,St1} = variable(Cop, Sub, St0),
+ {Kargs,Ap,St2} = atomic_list(Cargs, Sub, St1),
+ {#k_call{anno=A,op=Kop,args=Kargs},Op ++ Ap,St2}.
+
+flatten_seq(#iset{anno=A,vars=Vs,arg=Arg,body=B}) ->
+ [#iset{anno=A,vars=Vs,arg=Arg}|flatten_seq(B)];
+flatten_seq(Ke) -> [Ke].
+
+pre_seq([#iset{anno=A,vars=Vs,arg=Arg,body=B}|Ps], K) ->
+ B = undefined, %Assertion.
+ #iset{anno=A,vars=Vs,arg=Arg,body=pre_seq(Ps, K)};
+pre_seq([P|Ps], K) ->
+ #iset{vars=[],arg=P,body=pre_seq(Ps, K)};
+pre_seq([], K) -> K.
+
+%% atomic_lit(Cexpr, Sub, State) -> {Katomic,[PreKexpr],State}.
+%% Convert a Core expression making sure the result is an atomic
+%% literal.
+
+atomic_lit(Ce, Sub, St0) ->
+ {Ke,Kp,St1} = expr(Ce, Sub, St0),
+ {Ka,Ap,St2} = force_atomic(Ke, St1),
+ {Ka,Kp ++ Ap,St2}.
+
+force_atomic(Ke, St0) ->
+ case is_atomic(Ke) of
+ true -> {Ke,[],St0};
+ false ->
+ {V,St1} = new_var(St0),
+ {V,[#iset{vars=[V],arg=Ke}],St1}
+ end.
+
+% force_atomic_list(Kes, St) ->
+% foldr(fun (Ka, {As,Asp,St0}) ->
+% {A,Ap,St1} = force_atomic(Ka, St0),
+% {[A|As],Ap ++ Asp,St1}
+% end, {[],[],St}, Kes).
+
+atomic_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
+ Sub, St0, B0) ->
+ {E,Ap1,St1} = atomic_lit(E0, Sub, St0),
+ {S1,Ap2,St2} = atomic_lit(S0, Sub, St1),
+ validate_bin_element_size(S1),
+ U0 = core_lib:literal_value(U),
+ Fs0 = core_lib:literal_value(Fs),
+ {B1,Fs1} = aligned(B0, S1, U0, Fs0),
+ {Es,Ap3,St3} = atomic_bin(Es0, Sub, St2, B1),
+ {#k_bin_seg{anno=A,size=S1,
+ unit=U0,
+ type=core_lib:literal_value(T),
+ flags=Fs1,
+ seg=E,next=Es},
+ Ap1++Ap2++Ap3,St3};
+atomic_bin([], _Sub, St, _Bits) -> {#k_bin_end{},[],St}.
+
+validate_bin_element_size(#k_var{}) -> ok;
+validate_bin_element_size(#k_int{val=V}) when V >= 0 -> ok;
+validate_bin_element_size(#k_atom{val=all}) -> ok;
+validate_bin_element_size(_) -> throw(bad_element_size).
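+
+%% For example, a literal negative segment size is rejected here; the
+%% throw is caught in expr/3 above, which then replaces the whole
+%% binary construction with a call to erlang:error(badarg).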
+
+%% atomic_list([Cexpr], Sub, State) -> {[Kexpr],[PreKexpr],State}.
+
+atomic_list(Ces, Sub, St) ->
+ foldr(fun (Ce, {Kes,Esp,St0}) ->
+ {Ke,Ep,St1} = atomic_lit(Ce, Sub, St0),
+ {[Ke|Kes],Ep ++ Esp,St1}
+ end, {[],[],St}, Ces).
+
+%% is_atomic(Kexpr) -> boolean().
+%% Is a Kexpr atomic? Strings are NOT considered atomic!
+
+is_atomic(#k_int{}) -> true;
+is_atomic(#k_float{}) -> true;
+is_atomic(#k_atom{}) -> true;
+%%is_atomic(#k_char{}) -> true; %No characters
+%%is_atomic(#k_string{}) -> true;
+is_atomic(#k_nil{}) -> true;
+is_atomic(#k_var{}) -> true;
+is_atomic(_) -> false.
+
+%% variable(Cexpr, Sub, State) -> {Kvar,[PreKexpr],State}.
+%% Convert a Core expression making sure the result is a variable.
+
+variable(Ce, Sub, St0) ->
+ {Ke,Kp,St1} = expr(Ce, Sub, St0),
+ {Kv,Vp,St2} = force_variable(Ke, St1),
+ {Kv,Kp ++ Vp,St2}.
+
+force_variable(#k_var{}=Ke, St) -> {Ke,[],St};
+force_variable(Ke, St0) ->
+ {V,St1} = new_var(St0),
+ {V,[#iset{vars=[V],arg=Ke}],St1}.
+
+%% pattern(Cpat, Sub, State) -> {Kpat,Sub,State}.
+%% Convert patterns. Variables shadow so rename variables that are
+%% already defined.
+
+pattern(#c_var{anno=A,name=V}, Sub, St0) ->
+ case sets:is_element(V, St0#kern.ds) of
+ true ->
+ {New,St1} = new_var_name(St0),
+ {#k_var{anno=A,name=New},
+ set_vsub(V, New, Sub),
+ St1#kern{ds=sets:add_element(New, St1#kern.ds)}};
+ false ->
+ {#k_var{anno=A,name=V},Sub,
+ St0#kern{ds=sets:add_element(V, St0#kern.ds)}}
+ end;
+pattern(#c_char{anno=A,val=C}, Sub, St) ->
+ {#k_int{anno=A,val=C},Sub,St}; %Convert to integers!
+pattern(#c_int{anno=A,val=I}, Sub, St) ->
+ {#k_int{anno=A,val=I},Sub,St};
+pattern(#c_float{anno=A,val=F}, Sub, St) ->
+ {#k_float{anno=A,val=F},Sub,St};
+pattern(#c_atom{anno=A,val=At}, Sub, St) ->
+ {#k_atom{anno=A,val=At},Sub,St};
+pattern(#c_string{val=S}, Sub, St) ->
+ L = foldr(fun (C, T) -> #k_cons{hd=#k_int{val=C},tl=T} end,
+ #k_nil{}, S),
+ {L,Sub,St};
+pattern(#c_nil{anno=A}, Sub, St) ->
+ {#k_nil{anno=A},Sub,St};
+pattern(#c_cons{anno=A,hd=Ch,tl=Ct}, Sub0, St0) ->
+ {Kh,Sub1,St1} = pattern(Ch, Sub0, St0),
+ {Kt,Sub2,St2} = pattern(Ct, Sub1, St1),
+ {#k_cons{anno=A,hd=Kh,tl=Kt},Sub2,St2};
+pattern(#c_tuple{anno=A,es=Ces}, Sub0, St0) ->
+ {Kes,Sub1,St1} = pattern_list(Ces, Sub0, St0),
+ {#k_tuple{anno=A,es=Kes},Sub1,St1};
+pattern(#c_binary{anno=A,segments=Cv}, Sub0, St0) ->
+ {Kv,Sub1,St1} = pattern_bin(Cv, Sub0, St0),
+ {#k_binary{anno=A,segs=Kv},Sub1,St1};
+pattern(#c_alias{anno=A,var=Cv,pat=Cp}, Sub0, St0) ->
+ {Cvs,Cpat} = flatten_alias(Cp),
+ {Kvs,Sub1,St1} = pattern_list([Cv|Cvs], Sub0, St0),
+ {Kpat,Sub2,St2} = pattern(Cpat, Sub1, St1),
+ {#ialias{anno=A,vars=Kvs,pat=Kpat},Sub2,St2}.
+
+flatten_alias(#c_alias{var=V,pat=P}) ->
+ {Vs,Pat} = flatten_alias(P),
+ {[V|Vs],Pat};
+flatten_alias(Pat) -> {[],Pat}.
+
+pattern_bin(Es, Sub, St) -> pattern_bin(Es, Sub, St, 0).
+
+pattern_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
+ Sub0, St0, B0) ->
+ {S1,[],St1} = expr(S0, Sub0, St0),
+ U0 = core_lib:literal_value(U),
+ Fs0 = core_lib:literal_value(Fs),
+ %%ok= io:fwrite("~w: ~p~n", [?LINE,{B0,S1,U0,Fs0}]),
+ {B1,Fs1} = aligned(B0, S1, U0, Fs0),
+ {E,Sub1,St2} = pattern(E0, Sub0, St1),
+ {Es,Sub2,St3} = pattern_bin(Es0, Sub1, St2, B1),
+ {#k_bin_seg{anno=A,size=S1,
+ unit=U0,
+ type=core_lib:literal_value(T),
+ flags=Fs1,
+ seg=E,next=Es},
+ Sub2,St3};
+pattern_bin([], Sub, St, _Bits) -> {#k_bin_end{},Sub,St}.
+
+%% pattern_list([Cexpr], Sub, State) -> {[Kexpr],Sub,State}.
+
+pattern_list(Ces, Sub, St) ->
+ foldr(fun (Ce, {Kes,Sub0,St0}) ->
+ {Ke,Sub1,St1} = pattern(Ce, Sub0, St0),
+ {[Ke|Kes],Sub1,St1}
+ end, {[],Sub,St}, Ces).
+
+%% new_sub() -> Subs.
+%% set_vsub(Name, Sub, Subs) -> Subs.
+%% subst_vsub(Name, Sub, Subs) -> Subs.
+%% get_vsub(Name, Subs) -> SubName.
+%% Add/get substitute Sub for Name to VarSub. Use orddict so we know
+%% the format is a list of {Name,Sub} pairs. When adding a new
+%% substitute we fold substitute chains so we never have to search
+%% more than once.
+
+new_sub() -> orddict:new().
+
+get_vsub(V, Vsub) ->
+ case orddict:find(V, Vsub) of
+ {ok,Val} -> Val;
+ error -> V
+ end.
+
+set_vsub(V, S, Vsub) ->
+ orddict:store(V, S, Vsub).
+
+subst_vsub(V, S, Vsub0) ->
+ %% Fold chained substitutions.
+ Vsub1 = orddict:map(fun (_, V1) when V1 =:= V -> S;
+ (_, V1) -> V1
+ end, Vsub0),
+ orddict:store(V, S, Vsub1).
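+
+%% A small worked example of the chain folding (names assumed):
+%%     Sub0 = set_vsub(a, b, new_sub()),   %% [{a,b}]
+%%     Sub1 = subst_vsub(b, c, Sub0),      %% [{a,c},{b,c}]
+%% so get_vsub(a, Sub1) returns c directly instead of following the
+%% chain a -> b -> c.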
+
+get_fsub(F, A, Fsub) ->
+ case orddict:find({F,A}, Fsub) of
+ {ok,Val} -> Val;
+ error -> F
+ end.
+
+set_fsub(F, A, S, Fsub) ->
+ orddict:store({F,A}, S, Fsub).
+
+new_fun_name(St) ->
+ new_fun_name("anonymous", St).
+
+%% new_fun_name(Type, State) -> {FunName,State}.
+
+new_fun_name(Type, #kern{func={F,Arity},fcount=C}=St) ->
+ Name = "-" ++ atom_to_list(F) ++ "/" ++ integer_to_list(Arity) ++
+ "-" ++ Type ++ "-" ++ integer_to_list(C) ++ "-",
+ {list_to_atom(Name),St#kern{fcount=C+1}}.
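+
+%% For a current function foo/2 (name assumed for illustration), the
+%% first call to new_fun_name/1 yields '-foo/2-anonymous-0-'.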
+
+%% new_var_name(State) -> {VarName,State}.
+
+new_var_name(#kern{vcount=C}=St) ->
+ {list_to_atom("ker" ++ integer_to_list(C)),St#kern{vcount=C+1}}.
+
+%% new_var(State) -> {#k_var{},State}.
+
+new_var(St0) ->
+ {New,St1} = new_var_name(St0),
+ {#k_var{name=New},St1}.
+
+%% new_vars(Count, State) -> {[#k_var{}],State}.
+%% Make Count new variables.
+
+new_vars(N, St) -> new_vars(N, St, []).
+
+new_vars(N, St0, Vs) when N > 0 ->
+ {V,St1} = new_var(St0),
+ new_vars(N-1, St1, [V|Vs]);
+new_vars(0, St, Vs) -> {Vs,St}.
+
+make_vars(Vs) -> [ #k_var{name=V} || V <- Vs ].
+
+add_var_def(V, St) ->
+ St#kern{ds=sets:add_element(V#k_var.name, St#kern.ds)}.
+
+%%add_vars_def(Vs, St) ->
+%% Ds = foldl(fun (#k_var{name=V}, Ds) -> add_element(V, Ds) end,
+%% St#kern.ds, Vs),
+%% St#kern{ds=Ds}.
+
+%% is_remote_bif(Mod, Name, Arity) -> true | false.
+%% Test if function is really a BIF.
+
+is_remote_bif(erlang, is_boolean, 1) ->
+ %% XXX Remove this clause in R11. For bootstrap purposes, we must
+ %% recognize erlang:is_boolean/1 here.
+ true;
+is_remote_bif(erlang, internal_is_record, 3) -> true;
+is_remote_bif(erlang, get, 1) -> true;
+is_remote_bif(erlang, N, A) ->
+ case erl_internal:guard_bif(N, A) of
+ true -> true;
+ false ->
+ case erl_internal:type_test(N, A) of
+ true -> true;
+ false ->
+ case catch erl_internal:op_type(N, A) of
+ arith -> true;
+ bool -> true;
+ comp -> true;
+ _Other -> false %List, send or not an op
+ end
+ end
+ end;
+is_remote_bif(_, _, _) -> false.
+
+%% bif_vals(Name, Arity) -> integer().
+%% bif_vals(Mod, Name, Arity) -> integer().
+%% Determine how many return values a BIF has. Provision for BIFs to
+%% return multiple values. Only used in bodies where a BIF may be
+%% called for effect only.
+
+bif_vals(dsetelement, 3) -> 0;
+bif_vals(_, _) -> 1.
+
+bif_vals(_, _, _) -> 1.
+
+%% foldr2(Fun, Acc, List1, List2) -> Acc.
+%% Fold over two lists.
+
+foldr2(Fun, Acc0, [E1|L1], [E2|L2]) ->
+ Acc1 = Fun(E1, E2, Acc0),
+ foldr2(Fun, Acc1, L1, L2);
+foldr2(_, Acc, [], []) -> Acc.
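+
+%% For example (inputs assumed):
+%%     foldr2(fun (X, Y, Acc) -> [{X,Y}|Acc] end, [], [a,b], [1,2])
+%% gives [{b,2},{a,1}], pairing the two lists element by element.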
+
+%% first([A]) -> [A].
+%% last([A]) -> A.
+
+last([L]) -> L;
+last([_|T]) -> last(T).
+
+first([_]) -> [];
+first([H|T]) -> [H|first(T)].
+
+%% This code implements the algorithm for an optimizing compiler for
+%% pattern matching given in "The Implementation of Functional
+%% Programming Languages" by Simon Peyton Jones. The code is much
+%% longer because the meaning of constructors differs from the book.
+%%
+%% In Erlang many constructors can have different values, e.g. 'atom'
+%% or 'integer', whereas in the original algorithm these would be
+%% different constructors. Our view makes it easier in later passes to
+%% handle indexing over each type.
+%%
+%% Patterns are complicated by having alias variables. The form of a
+%% pattern is Pat | {alias,Pat,[AliasVar]}. This is hidden by access
+%% functions to pattern arguments but the code must be aware of it.
+%%
+%% The compilation proceeds in two steps:
+%%
+%% 1. The patterns in the clauses are converted to lists of kernel
+%% patterns. The Core clause is now a hybrid, which is easier to work
+%% with. Remove clauses with trivially false guards; this simplifies
+%% later passes. Add locally defined vars and variable subs to each
+%% clause for later use.
+%%
+%% 2. The pattern matching is optimised. Variable substitutions are
+%% added to the VarSub structure and new variables are made visible.
+%% The guard and body are then converted to Kernel form.
+
+%% kmatch([Var], [Clause], Sub, State) -> {Kexpr,[PreExpr],State}.
+
+kmatch(Us, Ccs, Sub, St0) ->
+ {Cs,St1} = match_pre(Ccs, Sub, St0), %Convert clauses
+ %%Def = kernel_match_error, %The strict case
+ %% This should be a kernel expression from the first pass.
+ Def = #k_call{anno=[compiler_generated],
+ op=#k_remote{mod=#k_atom{val=erlang},
+ name=#k_atom{val=exit},
+ arity=1},
+ args=[#k_atom{val=kernel_match_error}]},
+ {Km,St2} = match(Us, Cs, Def, St1), %Do the match.
+ {Km,St2}.
+
+%% match_pre([Cclause], Sub, State) -> {[Clause],State}.
+%% Must be careful not to generate new substitutions here now!
+%% Remove clauses with trivially false guards which will never
+%% succeed.
+
+match_pre(Cs, Sub0, St) ->
+ foldr(fun (#c_clause{anno=A,pats=Ps,guard=G,body=B}, {Cs0,St0}) ->
+ case is_false_guard(G) of
+ true -> {Cs0,St0};
+ false ->
+ {Kps,Sub1,St1} = pattern_list(Ps, Sub0, St0),
+ {[#iclause{anno=A,sub=Sub1,pats=Kps,guard=G,body=B}|
+ Cs0],St1}
+ end
+ end, {[],St}, Cs).
+
+%% match([Var], [Clause], Default, State) -> {MatchExpr,State}.
+
+match([U|Us], Cs, Def, St0) ->
+ %%ok = io:format("match ~p~n", [Cs]),
+ Pcss = partition(Cs),
+ foldr(fun (Pcs, {D,St}) -> match_varcon([U|Us], Pcs, D, St) end,
+ {Def,St0}, Pcss);
+match([], Cs, Def, St) ->
+ match_guard(Cs, Def, St).
+
+%% match_guard([Clause], Default, State) -> {IfExpr,State}.
+%% Build a guard to handle guards. A guard *ALWAYS* fails if no
+%% clause matches, there will be a surrounding 'alt' to catch the
+%% failure. Drop redundant cases, i.e. those after a true guard.
+
+match_guard(Cs0, Def0, St0) ->
+ {Cs1,Def1,St1} = match_guard_1(Cs0, Def0, St0),
+ {build_alt(build_guard(Cs1), Def1),St1}.
+
+match_guard_1([#iclause{anno=A,sub=Sub,guard=G,body=B}|Cs0], Def0, St0) ->
+ case is_true_guard(G) of
+ true ->
+ %% The true clause body becomes the default.
+ {Kb,Pb,St1} = body(B, Sub, St0),
+ Line = get_line(A),
+ St2 = maybe_add_warning(Cs0, Line, St1),
+ St = maybe_add_warning(Def0, Line, St2),
+ {[],pre_seq(Pb, Kb),St};
+ false ->
+ {Kg,St1} = guard(G, Sub, St0),
+ {Kb,Pb,St2} = body(B, Sub, St1),
+ {Cs1,Def1,St3} = match_guard_1(Cs0, Def0, St2),
+ {[#k_guard_clause{guard=Kg,body=pre_seq(Pb, Kb)}|Cs1],
+ Def1,St3}
+ end;
+match_guard_1([], Def, St) -> {[],Def,St}.
+
+maybe_add_warning([C|_], Line, St) ->
+ maybe_add_warning(C, Line, St);
+maybe_add_warning([], _Line, St) -> St;
+maybe_add_warning(fail, _Line, St) -> St;
+maybe_add_warning(Ke, MatchLine, St) ->
+ case get_kanno(Ke) of
+ [compiler_generated|_] -> St;
+ Anno ->
+ Line = get_line(Anno),
+ Warn = case MatchLine of
+ none -> nomatch_shadow;
+ _ -> {nomatch_shadow,MatchLine}
+ end,
+ add_warning(Line, Warn, St)
+ end.
+
+get_line([Line|_]) when is_integer(Line) -> Line;
+get_line([_|T]) -> get_line(T);
+get_line([]) -> none.
+
+
+%% is_true_guard(Guard) -> boolean().
+%% is_false_guard(Guard) -> boolean().
+%% Test if a guard is either trivially true/false. This has probably
+%% already been optimised away, but what the heck!
+
+is_true_guard(G) -> guard_value(G) == true.
+is_false_guard(G) -> guard_value(G) == false.
+
+%% guard_value(Guard) -> true | false | unknown.
+
+guard_value(#c_atom{val=true}) -> true;
+guard_value(#c_atom{val=false}) -> false;
+guard_value(#c_call{module=#c_atom{val=erlang},
+ name=#c_atom{val='not'},
+ args=[A]}) ->
+ case guard_value(A) of
+ true -> false;
+ false -> true;
+ unknown -> unknown
+ end;
+guard_value(#c_call{module=#c_atom{val=erlang},
+ name=#c_atom{val='and'},
+ args=[Ca,Cb]}) ->
+ case guard_value(Ca) of
+ true -> guard_value(Cb);
+ false -> false;
+ unknown ->
+ case guard_value(Cb) of
+ false -> false;
+ _Other -> unknown
+ end
+ end;
+guard_value(#c_call{module=#c_atom{val=erlang},
+ name=#c_atom{val='or'},
+ args=[Ca,Cb]}) ->
+ case guard_value(Ca) of
+ true -> true;
+ false -> guard_value(Cb);
+ unknown ->
+ case guard_value(Cb) of
+ true -> true;
+ _Other -> unknown
+ end
+ end;
+guard_value(#c_try{arg=E,vars=[#c_var{name=X}],body=#c_var{name=X},
+ handler=#c_atom{val=false}}) ->
+ guard_value(E);
+guard_value(_) -> unknown.
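+
+%% For example (guards assumed): the guard "true and false" evaluates
+%% here to false, while "X and true" gives unknown, since X is not a
+%% literal boolean.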
+
+%% partition([Clause]) -> [[Clause]].
+%% Partition a list of clauses into groups which either contain
+%% clauses with a variable first argument, or with a "constructor".
+
+partition([C1|Cs]) ->
+ V1 = is_var_clause(C1),
+ {More,Rest} = splitwith(fun (C) -> is_var_clause(C) == V1 end, Cs),
+ [[C1|More]|partition(Rest)];
+partition([]) -> [].
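+
+%% For example, clauses whose first patterns are [H|T], {A,B}, X, []
+%% are partitioned into [[cons,tuple], [var], [nil]]: consecutive
+%% clauses stay together as long as they agree on whether their first
+%% pattern is a variable.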
+
+%% match_varcon([Var], [Clause], Def, [Var], Sub, State) ->
+%% {MatchExpr,State}.
+
+match_varcon(Us, [C|_]=Cs, Def, St) ->
+ case is_var_clause(C) of
+ true -> match_var(Us, Cs, Def, St);
+ false -> match_con(Us, Cs, Def, St)
+ end.
+
+%% match_var([Var], [Clause], Def, State) -> {MatchExpr,State}.
+%% Build a call to "select" from a list of clauses all containing a
+%% variable as the first argument. We must rename the variable in
+%% each clause to be the match variable as these clauses will share
+%% this variable and may have different names for it. Rename aliases
+%% as well.
+
+match_var([U|Us], Cs0, Def, St) ->
+ Cs1 = map(fun (#iclause{sub=Sub0,pats=[Arg|As]}=C) ->
+ Vs = [arg_arg(Arg)|arg_alias(Arg)],
+ Sub1 = foldl(fun (#k_var{name=V}, Acc) ->
+ subst_vsub(V, U#k_var.name, Acc)
+ end, Sub0, Vs),
+ C#iclause{sub=Sub1,pats=As}
+ end, Cs0),
+ match(Us, Cs1, Def, St).
+
+%% match_con(Variables, [Clause], Default, State) -> {SelectExpr,State}.
+%% Build call to "select" from a list of clauses all containing a
+%% constructor/constant as first argument. Group the constructors
+%% according to type; the order is really irrelevant, but we try to
+%% be smart about it.
+
+match_con([U|Us], Cs, Def, St0) ->
+ %% Extract clauses for different constructors (types).
+ %%ok = io:format("match_con ~p~n", [Cs]),
+ Ttcs = [ {T,Tcs} || T <- [k_cons,k_tuple,k_atom,k_float,k_int,k_nil,
+ k_binary,k_bin_end],
+ begin Tcs = select(T, Cs),
+ Tcs /= []
+ end ] ++ select_bin_con(Cs),
+ %%ok = io:format("ttcs = ~p~n", [Ttcs]),
+ {Scs,St1} =
+ mapfoldl(fun ({T,Tcs}, St) ->
+ {[S|_]=Sc,S1} = match_value([U|Us], T, Tcs, fail, St),
+ %%ok = io:format("match_con type2 ~p~n", [T]),
+ Anno = get_kanno(S),
+ {#k_type_clause{anno=Anno,type=T,values=Sc},S1} end,
+ St0, Ttcs),
+ {build_alt_1st_no_fail(build_select(U, Scs), Def),St1}.
+
+%% select_bin_con([Clause]) -> [{Type,[Clause]}].
+%% Extract clauses for the k_bin_seg constructor. As k_bin_seg
+%% matching can overlap, the k_bin_seg constructors cannot be
+%% reordered, only grouped.
+
+select_bin_con(Cs0) ->
+ Cs1 = lists:filter(fun (C) ->
+ clause_con(C) == k_bin_seg
+ end, Cs0),
+ select_bin_con_1(Cs1).
+
+select_bin_con_1([C1|Cs]) ->
+ Con = clause_con(C1),
+ {More,Rest} = splitwith(fun (C) -> clause_con(C) == Con end, Cs),
+ [{Con,[C1|More]}|select_bin_con_1(Rest)];
+select_bin_con_1([]) -> [].
+
+%% select(Con, [Clause]) -> [Clause].
+
+select(T, Cs) -> [ C || C <- Cs, clause_con(C) == T ].
+
+%% match_value([Var], Con, [Clause], Default, State) -> {SelectExpr,State}.
+%% At this point all the clauses have the same constructor; we must
+%% now separate them according to value.
+
+match_value(_, _, [], _, St) -> {[],St};
+match_value(Us, T, Cs0, Def, St0) ->
+ Css = group_value(T, Cs0),
+ %%ok = io:format("match_value ~p ~p~n", [T, Css]),
+ {Css1,St1} = mapfoldl(fun (Cs, St) ->
+ match_clause(Us, Cs, Def, St) end,
+ St0, Css),
+ {Css1,St1}.
+ %%{#k_select_val{type=T,var=hd(Us),clauses=Css1},St1}.
+
+%% group_value([Clause]) -> [[Clause]].
+%% Group clauses according to value. Here we know that
+%% 1. Some types are single valued
+%% 2. The clauses in bin_segs cannot be reordered, only grouped
+%% 3. Other types are disjoint and can be reordered
+
+group_value(k_cons, Cs) -> [Cs]; %These are single valued
+group_value(k_nil, Cs) -> [Cs];
+group_value(k_binary, Cs) -> [Cs];
+group_value(k_bin_end, Cs) -> [Cs];
+group_value(k_bin_seg, Cs) ->
+ group_bin_seg(Cs);
+group_value(_, Cs) ->
+ %% group_value(Cs).
+ Cd = foldl(fun (C, Gcs0) -> dict:append(clause_val(C), C, Gcs0) end,
+ dict:new(), Cs),
+ dict:fold(fun (_, Vcs, Css) -> [Vcs|Css] end, [], Cd).
+
+group_bin_seg([C1|Cs]) ->
+ V1 = clause_val(C1),
+ {More,Rest} = splitwith(fun (C) -> clause_val(C) == V1 end, Cs),
+ [[C1|More]|group_bin_seg(Rest)];
+group_bin_seg([]) -> [].
+
+%% Profiling shows that this quadratic implementation accounts for a
+%% large amount of the execution time if there are many values.
+% group_value([C|Cs]) ->
+% V = clause_val(C),
+% Same = [ Cv || Cv <- Cs, clause_val(Cv) == V ], %Same value
+% Rest = [ Cv || Cv <- Cs, clause_val(Cv) /= V ], % and all the rest
+% [[C|Same]|group_value(Rest)];
+% group_value([]) -> [].
+
+%% match_clause([Var], [Clause], Default, State) -> {Clause,State}.
+%% At this point all the clauses have the same "value". Build one
+%% select clause for this value and continue matching. Rename
+%% aliases as well.
+
+match_clause([U|Us], [C|_]=Cs0, Def, St0) ->
+ Anno = get_kanno(C),
+ {Match0,Vs,St1} = get_match(get_con(Cs0), St0),
+ Match = sub_size_var(Match0, Cs0),
+ {Cs1,St2} = new_clauses(Cs0, U, St1),
+ {B,St3} = match(Vs ++ Us, Cs1, Def, St2),
+ {#k_val_clause{anno=Anno,val=Match,body=B},St3}.
+
+sub_size_var(#k_bin_seg{size=#k_var{name=Name}=Kvar}=BinSeg, [#iclause{sub=Sub}|_]) ->
+ BinSeg#k_bin_seg{size=Kvar#k_var{name=get_vsub(Name, Sub)}};
+sub_size_var(K, _) -> K.
+
+get_con([C|_]) -> arg_arg(clause_arg(C)). %Get the constructor
+
+get_match(#k_cons{}, St0) ->
+ {[H,T],St1} = new_vars(2, St0),
+ {#k_cons{hd=H,tl=T},[H,T],St1};
+get_match(#k_binary{}, St0) ->
+ {[V]=Mes,St1} = new_vars(1, St0),
+ {#k_binary{segs=V},Mes,St1};
+get_match(#k_bin_seg{}=Seg, St0) ->
+ {[S,N]=Mes,St1} = new_vars(2, St0),
+ {Seg#k_bin_seg{seg=S,next=N},Mes,St1};
+get_match(#k_tuple{es=Es}, St0) ->
+ {Mes,St1} = new_vars(length(Es), St0),
+ {#k_tuple{es=Mes},Mes,St1};
+get_match(M, St) ->
+ {M,[],St}.
+
+new_clauses(Cs0, U, St) ->
+ Cs1 = map(fun (#iclause{sub=Sub0,pats=[Arg|As]}=C) ->
+ Head = case arg_arg(Arg) of
+ #k_cons{hd=H,tl=T} -> [H,T|As];
+ #k_tuple{es=Es} -> Es ++ As;
+ #k_binary{segs=E} -> [E|As];
+ #k_bin_seg{seg=S,next=N} ->
+ [S,N|As];
+ _Other -> As
+ end,
+ Vs = arg_alias(Arg),
+ Sub1 = foldl(fun (#k_var{name=V}, Acc) ->
+ subst_vsub(V, U#k_var.name, Acc)
+ end, Sub0, Vs),
+ C#iclause{sub=Sub1,pats=Head}
+ end, Cs0),
+ {Cs1,St}.
+
+%% build_guard([GuardClause]) -> GuardExpr.
+
+build_guard([]) -> fail;
+build_guard(Cs) -> #k_guard{clauses=Cs}.
+
+%% build_select(Var, [ConClause]) -> SelectExpr.
+
+build_select(V, [Tc|_]=Tcs) ->
+ Anno = get_kanno(Tc),
+ #k_select{anno=Anno,var=V,types=Tcs}.
+
+%% build_alt(First, Then) -> AltExpr.
+%% Build an alt and attempt some simple optimisation.
+
+build_alt(fail, Then) -> Then;
+build_alt(First,Then) -> build_alt_1st_no_fail(First, Then).
+
+build_alt_1st_no_fail(First, fail) -> First;
+build_alt_1st_no_fail(First, Then) -> #k_alt{first=First,then=Then}.
+
+%% build_match([MatchVar], MatchExpr) -> Kexpr.
+%% Build a match expr if there is a match.
+
+build_match(Us, #k_alt{}=Km) -> #k_match{vars=Us,body=Km};
+build_match(Us, #k_select{}=Km) -> #k_match{vars=Us,body=Km};
+build_match(Us, #k_guard{}=Km) -> #k_match{vars=Us,body=Km};
+build_match(_, Km) -> Km.
+
+%% clause_arg(Clause) -> FirstArg.
+%% clause_con(Clause) -> Constructor.
+%% clause_val(Clause) -> Value.
+%% is_var_clause(Clause) -> boolean().
+
+clause_arg(#iclause{pats=[Arg|_]}) -> Arg.
+
+clause_con(C) -> arg_con(clause_arg(C)).
+
+clause_val(C) -> arg_val(clause_arg(C)).
+
+is_var_clause(C) -> clause_con(C) == k_var.
+
+%% arg_arg(Arg) -> Arg.
+%% arg_alias(Arg) -> Aliases.
+%% arg_con(Arg) -> Constructor.
+%% arg_val(Arg) -> Value.
+%% These are the basic functions for obtaining fields in an argument.
+
+arg_arg(#ialias{pat=Con}) -> Con;
+arg_arg(Con) -> Con.
+
+arg_alias(#ialias{vars=As}) -> As;
+arg_alias(_Con) -> [].
+
+arg_con(Arg) ->
+ case arg_arg(Arg) of
+ #k_int{} -> k_int;
+ #k_float{} -> k_float;
+ #k_atom{} -> k_atom;
+ #k_nil{} -> k_nil;
+ #k_cons{} -> k_cons;
+ #k_tuple{} -> k_tuple;
+ #k_binary{} -> k_binary;
+ #k_bin_end{} -> k_bin_end;
+ #k_bin_seg{} -> k_bin_seg;
+ #k_var{} -> k_var
+ end.
+
+arg_val(Arg) ->
+ case arg_arg(Arg) of
+ #k_int{val=I} -> I;
+ #k_float{val=F} -> F;
+ #k_atom{val=A} -> A;
+ #k_nil{} -> 0;
+ #k_cons{} -> 2;
+ #k_tuple{es=Es} -> length(Es);
+ #k_bin_seg{size=S,unit=U,type=T,flags=Fs} ->
+ {set_kanno(S, []),U,T,Fs};
+ #k_bin_end{} -> 0;
+ #k_binary{} -> 0
+ end.
+
+%% ubody(Expr, Break, State) -> {Expr,[UsedVar],State}.
+%% Tag the body sequence with its used variables. These bodies
+%% either end with a #k_break{}, or with a #k_return{} or an
+%% expression which can itself return: #k_enter{}, #k_match{}, etc.
+
+ubody(#iset{vars=[],arg=#iletrec{}=Let,body=B0}, Br, St0) ->
+ %% An iletrec{} should never be last.
+ St1 = iletrec_funs(Let, St0),
+ ubody(B0, Br, St1);
+ubody(#iset{anno=A,vars=Vs,arg=E0,body=B0}, Br, St0) ->
+ {E1,Eu,St1} = uexpr(E0, {break,Vs}, St0),
+ {B1,Bu,St2} = ubody(B0, Br, St1),
+ Ns = lit_list_vars(Vs),
+ Used = union(Eu, subtract(Bu, Ns)), %Used external vars
+ {#k_seq{anno=#k{us=Used,ns=Ns,a=A},arg=E1,body=B1},Used,St2};
+ubody(#ivalues{anno=A,args=As}, return, St) ->
+ Au = lit_list_vars(As),
+ {#k_return{anno=#k{us=Au,ns=[],a=A},args=As},Au,St};
+ubody(#ivalues{anno=A,args=As}, {break,_Vbs}, St) ->
+ Au = lit_list_vars(As),
+ {#k_break{anno=#k{us=Au,ns=[],a=A},args=As},Au,St};
+ubody(E, return, St0) ->
+ %% Enterable expressions need no trailing return.
+ case is_enter_expr(E) of
+ true -> uexpr(E, return, St0);
+ false ->
+ {Ea,Pa,St1} = force_atomic(E, St0),
+ ubody(pre_seq(Pa, #ivalues{args=[Ea]}), return, St1)
+ end;
+ubody(E, {break,Rs}, St0) ->
+ %%ok = io:fwrite("ubody ~w:~p~n", [?LINE,{E,Br}]),
+ %% Exiting expressions need no trailing break.
+ case is_exit_expr(E) of
+ true -> uexpr(E, return, St0);
+ false ->
+ {Ea,Pa,St1} = force_atomic(E, St0),
+ ubody(pre_seq(Pa, #ivalues{args=[Ea]}), {break,Rs}, St1)
+ end.
+
+iletrec_funs(#iletrec{defs=Fs}, St0) ->
+ %% Use union of all free variables.
+ %% First just work out free variables for all functions.
+ Free = foldl(fun ({_,#ifun{vars=Vs,body=Fb0}}, Free0) ->
+ {_,Fbu,_} = ubody(Fb0, return, St0),
+ Ns = lit_list_vars(Vs),
+ Free1 = subtract(Fbu, Ns),
+ union(Free1, Free0)
+ end, [], Fs),
+ FreeVs = make_vars(Free),
+ %% Add this free info to State.
+ St1 = foldl(fun ({N,#ifun{vars=Vs}}, Lst) ->
+ store_free(N, length(Vs), FreeVs, Lst)
+ end, St0, Fs),
+ %% Now regenerate local functions to use free variable information.
+ St2 = foldl(fun ({N,#ifun{anno=Fa,vars=Vs,body=Fb0}}, Lst0) ->
+ {Fb1,_,Lst1} = ubody(Fb0, return, Lst0),
+ Arity = length(Vs) + length(FreeVs),
+ Fun = #k_fdef{anno=#k{us=[],ns=[],a=Fa},
+ func=N,arity=Arity,
+ vars=Vs ++ FreeVs,body=Fb1},
+ Lst1#kern{funs=[Fun|Lst1#kern.funs]}
+ end, St1, Fs),
+ St2.
+
+%% is_exit_expr(Kexpr) -> boolean().
+%% Test whether Kexpr always exits and never returns.
+
+is_exit_expr(#k_call{op=#k_remote{mod=erlang,name=throw,arity=1}}) -> true;
+is_exit_expr(#k_call{op=#k_remote{mod=erlang,name=exit,arity=1}}) -> true;
+is_exit_expr(#k_call{op=#k_remote{mod=erlang,name=error,arity=1}}) -> true;
+is_exit_expr(#k_call{op=#k_remote{mod=erlang,name=error,arity=2}}) -> true;
+is_exit_expr(#k_call{op=#k_remote{mod=erlang,name=fault,arity=1}}) -> true;
+is_exit_expr(#k_call{op=#k_remote{mod=erlang,name=fault,arity=2}}) -> true;
+is_exit_expr(#k_call{op=#k_internal{name=match_fail,arity=1}}) -> true;
+is_exit_expr(#k_bif{op=#k_internal{name=rethrow,arity=2}}) -> true;
+is_exit_expr(#k_receive_next{}) -> true;
+is_exit_expr(_) -> false.
+
+%% is_enter_expr(Kexpr) -> boolean().
+%% Test whether Kexpr is "enterable", i.e. can handle return from
+%% within itself without extra #k_return{}.
+
+is_enter_expr(#k_call{}) -> true;
+is_enter_expr(#k_match{}) -> true;
+is_enter_expr(#k_receive{}) -> true;
+is_enter_expr(#k_receive_next{}) -> true;
+%%is_enter_expr(#k_try{}) -> true; %Soon
+is_enter_expr(_) -> false.
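+
+%% For example, a #k_call{} in tail position is turned into a
+%% #k_enter{} by uexpr/3 below instead of being followed by an
+%% explicit #k_return{}.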
+
+%% uguard(Expr, State) -> {Expr,[UsedVar],State}.
+%% Tag the guard sequence with its used variables.
+
+uguard(#k_try{anno=A,arg=B0,vars=[#k_var{name=X}],body=#k_var{name=X},
+ handler=#k_atom{val=false}}=Try, St0) ->
+ {B1,Bu,St1} = uguard(B0, St0),
+ {Try#k_try{anno=#k{us=Bu,ns=[],a=A},arg=B1},Bu,St1};
+uguard(T, St) ->
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,T]),
+ uguard_test(T, St).
+
+%% uguard_test(Expr, State) -> {Test,[UsedVar],State}.
+%% At this stage tests are just expressions which don't return any
+%% values.
+
+uguard_test(T, St) -> uguard_expr(T, [], St).
+
+uguard_expr(#iset{anno=A,vars=Vs,arg=E0,body=B0}, Rs, St0) ->
+ Ns = lit_list_vars(Vs),
+ {E1,Eu,St1} = uguard_expr(E0, Vs, St0),
+ {B1,Bu,St2} = uguard_expr(B0, Rs, St1),
+ Used = union(Eu, subtract(Bu, Ns)),
+ {#k_seq{anno=#k{us=Used,ns=Ns,a=A},arg=E1,body=B1},Used,St2};
+uguard_expr(#k_try{anno=A,arg=B0,vars=[#k_var{name=X}],body=#k_var{name=X},
+ handler=#k_atom{val=false}}=Try, Rs, St0) ->
+ {B1,Bu,St1} = uguard_expr(B0, Rs, St0),
+ {Try#k_try{anno=#k{us=Bu,ns=lit_list_vars(Rs),a=A},arg=B1,ret=Rs},
+ Bu,St1};
+uguard_expr(#k_test{anno=A,op=Op,args=As}=Test, Rs, St) ->
+ [] = Rs, %Sanity check
+ Used = union(op_vars(Op), lit_list_vars(As)),
+ {Test#k_test{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A}},
+ Used,St};
+uguard_expr(#k_bif{anno=A,op=Op,args=As}=Bif, Rs, St) ->
+ Used = union(op_vars(Op), lit_list_vars(As)),
+ {Bif#k_bif{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},ret=Rs},
+ Used,St};
+uguard_expr(#ivalues{anno=A,args=As}, Rs, St) ->
+ Sets = foldr2(fun (V, Arg, Rhs) ->
+ #iset{anno=A,vars=[V],arg=Arg,body=Rhs}
+ end, #k_atom{val=true}, Rs, As),
+ uguard_expr(Sets, [], St);
+uguard_expr(#k_match{anno=A,vars=Vs,body=B0}, Rs, St0) ->
+ %% Experimental support for andalso/orelse in guards.
+ Br = case Rs of
+ [] -> return;
+ _ -> {break,Rs}
+ end,
+ {B1,Bu,St1} = umatch(B0, Br, St0),
+ {#k_match{anno=#k{us=Bu,ns=lit_list_vars(Rs),a=A},
+ vars=Vs,body=B1,ret=Rs},Bu,St1};
+uguard_expr(Lit, Rs, St) ->
+ %% Transform literals to puts here.
+ Used = lit_vars(Lit),
+ {#k_put{anno=#k{us=Used,ns=lit_list_vars(Rs),a=get_kanno(Lit)},
+ arg=Lit,ret=Rs},Used,St}.
+
+%% uexpr(Expr, Break, State) -> {Expr,[UsedVar],State}.
+%% Tag an expression with its used variables.
+%% Break = return | {break,[RetVar]}.
+
+uexpr(#k_call{anno=A,op=#k_local{name=F,arity=Ar}=Op,args=As0}=Call, Br, St) ->
+ Free = get_free(F, Ar, St),
+ As1 = As0 ++ Free, %Add free variables LAST!
+ Used = lit_list_vars(As1),
+ {case Br of
+ {break,Rs} ->
+ Call#k_call{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},
+ op=Op#k_local{arity=Ar + length(Free)},
+ args=As1,ret=Rs};
+ return ->
+ #k_enter{anno=#k{us=Used,ns=[],a=A},
+ op=Op#k_local{arity=Ar + length(Free)},
+ args=As1}
+ end,Used,St};
+uexpr(#k_call{anno=A,op=Op,args=As}=Call, {break,Rs}, St) ->
+ Used = union(op_vars(Op), lit_list_vars(As)),
+ {Call#k_call{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},ret=Rs},
+ Used,St};
+uexpr(#k_call{anno=A,op=Op,args=As}, return, St) ->
+ Used = union(op_vars(Op), lit_list_vars(As)),
+ {#k_enter{anno=#k{us=Used,ns=[],a=A},op=Op,args=As},
+ Used,St};
+uexpr(#k_bif{anno=A,op=Op,args=As}=Bif, {break,Rs}, St0) ->
+ Used = union(op_vars(Op), lit_list_vars(As)),
+ {Brs,St1} = bif_returns(Op, Rs, St0),
+ {Bif#k_bif{anno=#k{us=Used,ns=lit_list_vars(Brs),a=A},ret=Brs},
+ Used,St1};
+uexpr(#k_match{anno=A,vars=Vs,body=B0}, Br, St0) ->
+ Rs = break_rets(Br),
+ {B1,Bu,St1} = umatch(B0, Br, St0),
+ {#k_match{anno=#k{us=Bu,ns=lit_list_vars(Rs),a=A},
+ vars=Vs,body=B1,ret=Rs},Bu,St1};
+uexpr(#k_receive{anno=A,var=V,body=B0,timeout=T,action=A0}, Br, St0) ->
+ Rs = break_rets(Br),
+ Tu = lit_vars(T), %Timeout is atomic
+ {B1,Bu,St1} = umatch(B0, Br, St0),
+ {A1,Au,St2} = ubody(A0, Br, St1),
+ Used = del_element(V#k_var.name, union(Bu, union(Tu, Au))),
+ {#k_receive{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},
+ var=V,body=B1,timeout=T,action=A1,ret=Rs},
+ Used,St2};
+uexpr(#k_receive_accept{anno=A}, _, St) ->
+ {#k_receive_accept{anno=#k{us=[],ns=[],a=A}},[],St};
+uexpr(#k_receive_next{anno=A}, _, St) ->
+ {#k_receive_next{anno=#k{us=[],ns=[],a=A}},[],St};
+uexpr(#k_try{anno=A,arg=A0,vars=Vs,body=B0,evars=Evs,handler=H0},
+ {break,Rs0}, St0) ->
+ {Avs,St1} = new_vars(length(Vs), St0), %Need dummy names here
+ {A1,Au,St2} = ubody(A0, {break,Avs}, St1), %Must break to clean up here!
+ {B1,Bu,St3} = ubody(B0, {break,Rs0}, St2),
+ {H1,Hu,St4} = ubody(H0, {break,Rs0}, St3),
+ %% Guarantee ONE return variable.
+ NumNew = if
+ Rs0 =:= [] -> 1;
+ true -> 0
+ end,
+ {Ns,St5} = new_vars(NumNew, St4),
+ Rs1 = Rs0 ++ Ns,
+ Used = union([Au,subtract(Bu, lit_list_vars(Vs)),
+ subtract(Hu, lit_list_vars(Evs))]),
+ {#k_try{anno=#k{us=Used,ns=lit_list_vars(Rs1),a=A},
+ arg=A1,vars=Vs,body=B1,evars=Evs,handler=H1,ret=Rs1},
+ Used,St5};
+uexpr(#k_catch{anno=A,body=B0}, {break,Rs0}, St0) ->
+ {Rb,St1} = new_var(St0),
+ {B1,Bu,St2} = ubody(B0, {break,[Rb]}, St1),
+ %% Guarantee ONE return variable.
+ {Ns,St3} = new_vars(1 - length(Rs0), St2),
+ Rs1 = Rs0 ++ Ns,
+ {#k_catch{anno=#k{us=Bu,ns=lit_list_vars(Rs1),a=A},body=B1,ret=Rs1},Bu,St3};
+uexpr(#ifun{anno=A,vars=Vs,body=B0}=IFun, {break,Rs}, St0) ->
+ {B1,Bu,St1} = ubody(B0, return, St0), %Return out of new function
+ Ns = lit_list_vars(Vs),
+ Free = subtract(Bu, Ns), %Free variables in fun
+ Fvs = make_vars(Free),
+ Arity = length(Vs) + length(Free),
+ {{Index,Uniq,Fname}, St3} =
+ case lists:keysearch(id, 1, A) of
+ {value,{id,Id}} ->
+ {Id, St1};
+ false ->
+ %% No id annotation. Must invent one.
+ I = St1#kern.fcount,
+ U = erlang:hash(IFun, (1 bsl 27)-1),
+ {N, St2} = new_fun_name(St1),
+ {{I,U,N}, St2}
+ end,
+ Fun = #k_fdef{anno=#k{us=[],ns=[],a=A},func=Fname,arity=Arity,
+ vars=Vs ++ Fvs,body=B1},
+ {#k_bif{anno=#k{us=Free,ns=lit_list_vars(Rs),a=A},
+ op=#k_internal{name=make_fun,arity=length(Free)+3},
+ args=[#k_atom{val=Fname},#k_int{val=Arity},
+ #k_int{val=Index},#k_int{val=Uniq}|Fvs],
+ ret=Rs},
+% {#k_call{anno=#k{us=Free,ns=lit_list_vars(Rs),a=A},
+% op=#k_internal{name=make_fun,arity=length(Free)+3},
+% args=[#k_atom{val=Fname},#k_int{val=Arity},
+% #k_int{val=Index},#k_int{val=Uniq}|Fvs],
+% ret=Rs},
+ Free,St3#kern{funs=[Fun|St3#kern.funs]}};
+uexpr(Lit, {break,Rs}, St) ->
+ %% Transform literals to puts here.
+ %%ok = io:fwrite("uexpr ~w:~p~n", [?LINE,Lit]),
+ Used = lit_vars(Lit),
+ {#k_put{anno=#k{us=Used,ns=lit_list_vars(Rs),a=get_kanno(Lit)},
+ arg=Lit,ret=Rs},Used,St}.
+
+%% get_free(Name, Arity, State) -> [Free].
+%% store_free(Name, Arity, [Free], State) -> State.
+
+get_free(F, A, St) ->
+ case orddict:find({F,A}, St#kern.free) of
+ {ok,Val} -> Val;
+ error -> []
+ end.
+
+store_free(F, A, Free, St) ->
+ St#kern{free=orddict:store({F,A}, Free, St#kern.free)}.
+
+break_rets({break,Rs}) -> Rs;
+break_rets(return) -> [].
+
+%% bif_returns(Op, [Ret], State) -> {[Ret],State}.
+
+bif_returns(#k_remote{mod=M,name=N,arity=Ar}, Rs, St0) ->
+ %%ok = io:fwrite("uexpr ~w:~p~n", [?LINE,{M,N,Ar,Rs}]),
+ {Ns,St1} = new_vars(bif_vals(M, N, Ar) - length(Rs), St0),
+ {Rs ++ Ns,St1};
+bif_returns(#k_internal{name=N,arity=Ar}, Rs, St0) ->
+ %%ok = io:fwrite("uexpr ~w:~p~n", [?LINE,{N,Ar,Rs}]),
+ {Ns,St1} = new_vars(bif_vals(N, Ar) - length(Rs), St0),
+ {Rs ++ Ns,St1}.
+
+%% umatch(Match, Break, State) -> {Match,[UsedVar],State}.
+%% Tag a match expression with its used variables.
+
+umatch(#k_alt{anno=A,first=F0,then=T0}, Br, St0) ->
+ {F1,Fu,St1} = umatch(F0, Br, St0),
+ {T1,Tu,St2} = umatch(T0, Br, St1),
+ Used = union(Fu, Tu),
+ {#k_alt{anno=#k{us=Used,ns=[],a=A},first=F1,then=T1},
+ Used,St2};
+umatch(#k_select{anno=A,var=V,types=Ts0}, Br, St0) ->
+ {Ts1,Tus,St1} = umatch_list(Ts0, Br, St0),
+ Used = add_element(V#k_var.name, Tus),
+ {#k_select{anno=#k{us=Used,ns=[],a=A},var=V,types=Ts1},Used,St1};
+umatch(#k_type_clause{anno=A,type=T,values=Vs0}, Br, St0) ->
+ {Vs1,Vus,St1} = umatch_list(Vs0, Br, St0),
+ {#k_type_clause{anno=#k{us=Vus,ns=[],a=A},type=T,values=Vs1},Vus,St1};
+umatch(#k_val_clause{anno=A,val=P,body=B0}, Br, St0) ->
+ {U0,Ps} = pat_vars(P),
+ {B1,Bu,St1} = umatch(B0, Br, St0),
+ Used = union(U0, subtract(Bu, Ps)),
+ {#k_val_clause{anno=#k{us=Used,ns=[],a=A},val=P,body=B1},
+ Used,St1};
+umatch(#k_guard{anno=A,clauses=Gs0}, Br, St0) ->
+ {Gs1,Gus,St1} = umatch_list(Gs0, Br, St0),
+ {#k_guard{anno=#k{us=Gus,ns=[],a=A},clauses=Gs1},Gus,St1};
+umatch(#k_guard_clause{anno=A,guard=G0,body=B0}, Br, St0) ->
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,G0]),
+ {G1,Gu,St1} = uguard(G0, St0),
+ %%ok = io:fwrite("~w: ~p~n", [?LINE,G1]),
+ {B1,Bu,St2} = umatch(B0, Br, St1),
+ Used = union(Gu, Bu),
+ {#k_guard_clause{anno=#k{us=Used,ns=[],a=A},guard=G1,body=B1},Used,St2};
+umatch(B0, Br, St0) -> ubody(B0, Br, St0).
+
+umatch_list(Ms0, Br, St) ->
+ foldr(fun (M0, {Ms1,Us,Sta}) ->
+ {M1,Mu,Stb} = umatch(M0, Br, Sta),
+ {[M1|Ms1],union(Mu, Us),Stb}
+ end, {[],[],St}, Ms0).
+
+%% op_vars(Op) -> [VarName].
+
+op_vars(#k_local{}) -> [];
+op_vars(#k_remote{mod=Mod,name=Name}) ->
+ ordsets:from_list([V || #k_var{name=V} <- [Mod,Name]]);
+op_vars(#k_internal{}) -> [];
+op_vars(Atomic) -> lit_vars(Atomic).
+
+%% lit_vars(Literal) -> [VarName].
+%% Return the variables in a literal.
+
+lit_vars(#k_var{name=N}) -> [N];
+lit_vars(#k_int{}) -> [];
+lit_vars(#k_float{}) -> [];
+lit_vars(#k_atom{}) -> [];
+%%lit_vars(#k_char{}) -> [];
+lit_vars(#k_string{}) -> [];
+lit_vars(#k_nil{}) -> [];
+lit_vars(#k_cons{hd=H,tl=T}) ->
+ union(lit_vars(H), lit_vars(T));
+lit_vars(#k_binary{segs=V}) -> lit_vars(V);
+lit_vars(#k_bin_end{}) -> [];
+lit_vars(#k_bin_seg{size=Size,seg=S,next=N}) ->
+ union(lit_vars(Size), union(lit_vars(S), lit_vars(N)));
+lit_vars(#k_tuple{es=Es}) ->
+ lit_list_vars(Es).
+
+lit_list_vars(Ps) ->
+ foldl(fun (P, Vs) -> union(lit_vars(P), Vs) end, [], Ps).
+
+%% pat_vars(Pattern) -> {[UsedVarName],[NewVarName]}.
+%% Return variables in a pattern. All variables are new variables
+%% except those in the size field of binary segments.
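+%%
+%% Illustrative example (added for clarity, not in the original source):
+%% for a binary pattern segment <<X:Size>>, the segment value X is a new
+%% variable while Size is a used variable, so pat_vars/1 on that segment
+%% yields {['Size'],['X']}.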
+
+pat_vars(#k_var{name=N}) -> {[],[N]};
+%%pat_vars(#k_char{}) -> {[],[]};
+pat_vars(#k_int{}) -> {[],[]};
+pat_vars(#k_float{}) -> {[],[]};
+pat_vars(#k_atom{}) -> {[],[]};
+pat_vars(#k_string{}) -> {[],[]};
+pat_vars(#k_nil{}) -> {[],[]};
+pat_vars(#k_cons{hd=H,tl=T}) ->
+ pat_list_vars([H,T]);
+pat_vars(#k_binary{segs=V}) ->
+ pat_vars(V);
+pat_vars(#k_bin_seg{size=Size,seg=S,next=N}) ->
+ {U1,New} = pat_list_vars([S,N]),
+ {[],U2} = pat_vars(Size),
+ {union(U1, U2),New};
+pat_vars(#k_bin_end{}) -> {[],[]};
+pat_vars(#k_tuple{es=Es}) ->
+ pat_list_vars(Es).
+
+pat_list_vars(Ps) ->
+ foldl(fun (P, {Used0,New0}) ->
+ {Used,New} = pat_vars(P),
+ {union(Used0, Used),union(New0, New)} end,
+ {[],[]}, Ps).
+
+%% aligned(Bits, Size, Unit, Flags) -> {Size,Flags}
+%% Add 'aligned' to the flags if the current field is aligned.
+%% Number of bits correct modulo 8.
+
+aligned(B, S, U, Fs) when B rem 8 =:= 0 ->
+ {incr_bits(B, S, U),[aligned|Fs]};
+aligned(B, S, U, Fs) ->
+ {incr_bits(B, S, U),Fs}.
+
+incr_bits(B, #k_int{val=S}, U) when integer(B) -> B + S*U;
+incr_bits(_, #k_atom{val=all}, _) -> 0; %Always aligned
+incr_bits(B, _, 8) -> B;
+incr_bits(_, _, _) -> unknown.
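+
+%% Illustrative example (not from the original source): a field starting
+%% at bit offset 8 is byte aligned, so
+%%   aligned(8, #k_int{val=16}, 1, []) -> {24,[aligned]}
+%% while a field starting at offset 3 leaves the flags unchanged:
+%%   aligned(3, #k_int{val=5}, 1, []) -> {8,[]}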
+
+make_list(Es) ->
+ foldr(fun (E, Acc) -> #c_cons{hd=E,tl=Acc} end, #c_nil{}, Es).
+
+%% List of integers in interval [N,M]. Empty list if N > M.
+
+integers(N, M) when N =< M ->
+ [N|integers(N + 1, M)];
+integers(_, _) -> [].
+
+%%%
+%%% Handling of warnings.
+%%%
+
+format_error({nomatch_shadow,Line}) ->
+ M = io_lib:format("this clause cannot match because a previous clause at line ~p "
+ "always matches", [Line]),
+ lists:flatten(M);
+format_error(nomatch_shadow) ->
+ "this clause cannot match because a previous clause always matches".
+
+add_warning(none, Term, #kern{ws=Ws}=St) ->
+ St#kern{ws=[{?MODULE,Term}|Ws]};
+add_warning(Line, Term, #kern{ws=Ws}=St) when Line >= 0 ->
+ St#kern{ws=[{Line,?MODULE,Term}|Ws]};
+add_warning(_, _, St) -> St.
+
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.hrl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.hrl
new file mode 100644
index 0000000000..822a9e34e1
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel.hrl
@@ -0,0 +1,77 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_kernel.hrl,v 1.1 2008/12/17 09:53:43 mikpe Exp $
+%%
+
+%% Purpose : Kernel Erlang as records.
+
+%% It would be nice to incorporate some generic functions as well but
+%% this could make including this file difficult.
+%% N.B. the annotation field is ALWAYS the first field!
+
+%% Kernel annotation record.
+-record(k, {us, %Used variables
+ ns, %New variables
+ a}). %Core annotation
+
+%% Literals
+%% NO CHARACTERS YET.
+%%-record(k_char, {anno=[],val}).
+-record(k_int, {anno=[],val}).
+-record(k_float, {anno=[],val}).
+-record(k_atom, {anno=[],val}).
+-record(k_string, {anno=[],val}).
+-record(k_nil, {anno=[]}).
+
+-record(k_tuple, {anno=[],es}).
+-record(k_cons, {anno=[],hd,tl}).
+-record(k_binary, {anno=[],segs}).
+-record(k_bin_seg, {anno=[],size,unit,type,flags,seg,next}).
+-record(k_bin_end, {anno=[]}).
+-record(k_var, {anno=[],name}).
+
+-record(k_local, {anno=[],name,arity}).
+-record(k_remote, {anno=[],mod,name,arity}).
+-record(k_internal, {anno=[],name,arity}).
+
+-record(k_mdef, {anno=[],name,exports,attributes,body}).
+-record(k_fdef, {anno=[],func,arity,vars,body}).
+
+-record(k_seq, {anno=[],arg,body}).
+-record(k_put, {anno=[],arg,ret=[]}).
+-record(k_bif, {anno=[],op,args,ret=[]}).
+-record(k_test, {anno=[],op,args}).
+-record(k_call, {anno=[],op,args,ret=[]}).
+-record(k_enter, {anno=[],op,args}).
+-record(k_receive, {anno=[],var,body,timeout,action,ret=[]}).
+-record(k_receive_accept, {anno=[]}).
+-record(k_receive_next, {anno=[]}).
+-record(k_try, {anno=[],arg,vars,body,evars,handler,ret=[]}).
+-record(k_catch, {anno=[],body,ret=[]}).
+
+-record(k_match, {anno=[],vars,body,ret=[]}).
+-record(k_alt, {anno=[],first,then}).
+-record(k_select, {anno=[],var,types}).
+-record(k_type_clause, {anno=[],type,values}).
+-record(k_val_clause, {anno=[],val,body}).
+-record(k_guard, {anno=[],clauses}).
+-record(k_guard_clause, {anno=[],guard,body}).
+
+-record(k_break, {anno=[],args=[]}).
+-record(k_return, {anno=[],args=[]}).
+
+%%k_get_anno(Thing) -> element(2, Thing).
+%%k_set_anno(Thing, Anno) -> setelement(2, Thing, Anno).
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel_pp.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel_pp.erl
new file mode 100644
index 0000000000..92ff173834
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_kernel_pp.erl
@@ -0,0 +1,444 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_kernel_pp.erl,v 1.1 2008/12/17 09:53:43 mikpe Exp $
+%%
+%% Purpose : Kernel Erlang (naive) prettyprinter
+
+-module(v3_kernel_pp).
+
+-include("v3_kernel.hrl").
+
+-export([format/1]).
+
+%% These are "internal" structures in sys_kernel which are here for
+%% debugging purposes.
+-record(iset, {anno=[],vars,arg,body}).
+-record(ifun, {anno=[],vars,body}).
+
+%% ====================================================================== %%
+%% format(Node) -> Text
+%% Node = coreErlang()
+%% Text = string() | [Text]
+%%
+%% Prettyprint-formats (naively) an abstract Kernel Erlang syntax
+%% tree.
+
+-record(ctxt, {indent = 0,
+ item_indent = 2,
+ body_indent = 2,
+ tab_width = 8}).
+
+canno(Cthing) -> element(2, Cthing).
+
+format(Node) -> format(Node, #ctxt{}).
+
+format(Node, Ctxt) ->
+ case canno(Node) of
+ [] ->
+ format_1(Node, Ctxt);
+ List ->
+ format_anno(List, Ctxt, fun (Ctxt1) -> format_1(Node, Ctxt1) end)
+ end.
+
+format_anno(Anno, Ctxt, ObjFun) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ ["( ",
+ ObjFun(Ctxt1),
+ nl_indent(Ctxt1),
+ "-| ",io_lib:write(Anno),
+ " )"].
+
+%% format_1(Kexpr, Context) -> string().
+
+format_1(#k_atom{val=A}, _Ctxt) -> core_atom(A);
+%%format_1(#k_char{val=C}, _Ctxt) -> io_lib:write_char(C);
+format_1(#k_float{val=F}, _Ctxt) -> float_to_list(F);
+format_1(#k_int{val=I}, _Ctxt) -> integer_to_list(I);
+format_1(#k_nil{}, _Ctxt) -> "[]";
+format_1(#k_string{val=S}, _Ctxt) -> io_lib:write_string(S);
+format_1(#k_var{name=V}, _Ctxt) ->
+ if atom(V) ->
+ case atom_to_list(V) of
+ [$_|Cs] -> "_X" ++ Cs;
+ [C|Cs] when C >= $A, C =< $Z -> [C|Cs];
+ Cs -> [$_|Cs]
+ end;
+ integer(V) -> [$_|integer_to_list(V)]
+ end;
+format_1(#k_cons{hd=H,tl=T}, Ctxt) ->
+ Txt = ["["|format(H, ctxt_bump_indent(Ctxt, 1))],
+ [Txt|format_list_tail(T, ctxt_bump_indent(Ctxt, width(Txt, Ctxt)))];
+format_1(#k_tuple{es=Es}, Ctxt) ->
+ [${,
+ format_hseq(Es, ",", ctxt_bump_indent(Ctxt, 1), fun format/2),
+ $}
+ ];
+format_1(#k_binary{segs=S}, Ctxt) ->
+ ["#<",format(S, ctxt_bump_indent(Ctxt, 2)),">#"];
+format_1(#k_bin_seg{}=S, Ctxt) ->
+ [format_bin_seg_1(S, Ctxt),
+ format_bin_seg(S#k_bin_seg.next, ctxt_bump_indent(Ctxt, 2))];
+format_1(#k_bin_end{}, _Ctxt) -> "#<>#";
+format_1(#k_local{name=N,arity=A}, Ctxt) ->
+ "local " ++ format_fa_pair({N,A}, Ctxt);
+format_1(#k_remote{mod=M,name=N,arity=A}, _Ctxt) ->
+ %% This is for our internal translator.
+ io_lib:format("remote ~s:~s/~w", [format(M),format(N),A]);
+format_1(#k_internal{name=N,arity=A}, Ctxt) ->
+ "internal " ++ format_fa_pair({N,A}, Ctxt);
+format_1(#k_seq{arg=A,body=B}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ ["do",
+ nl_indent(Ctxt1),
+ format(A, Ctxt1),
+ nl_indent(Ctxt),
+ "then",
+ nl_indent(Ctxt)
+ | format(B, Ctxt)
+ ];
+format_1(#k_match{vars=Vs,body=Bs,ret=Rs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.item_indent),
+ ["match ",
+ format_hseq(Vs, ",", ctxt_bump_indent(Ctxt, 6), fun format/2),
+ nl_indent(Ctxt1),
+ format(Bs, Ctxt1),
+ nl_indent(Ctxt),
+ "end",
+ format_ret(Rs, Ctxt1)
+ ];
+format_1(#k_alt{first=O,then=T}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.item_indent),
+ ["alt",
+ nl_indent(Ctxt1),
+ format(O, Ctxt1),
+ nl_indent(Ctxt1),
+ format(T, Ctxt1)];
+format_1(#k_select{var=V,types=Cs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ ["select ",
+ format(V, Ctxt),
+ nl_indent(Ctxt1),
+ format_vseq(Cs, "", "", Ctxt1, fun format/2)
+ ];
+format_1(#k_type_clause{type=T,values=Cs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["type ",
+ io_lib:write(T),
+ nl_indent(Ctxt1),
+ format_vseq(Cs, "", "", Ctxt1, fun format/2)
+ ];
+format_1(#k_val_clause{val=Val,body=B}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ [format(Val, Ctxt),
+ " ->",
+ nl_indent(Ctxt1)
+ | format(B, Ctxt1)
+ ];
+format_1(#k_guard{clauses=Gs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, 5),
+ ["when ",
+ nl_indent(Ctxt1),
+ format_vseq(Gs, "", "", Ctxt1, fun format/2)];
+format_1(#k_guard_clause{guard=G,body=B}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ [format(G, Ctxt),
+ nl_indent(Ctxt),
+ "->",
+ nl_indent(Ctxt1)
+ | format(B, Ctxt1)
+ ];
+format_1(#k_call{op=Op,args=As,ret=Rs}, Ctxt) ->
+ Txt = ["call (",format(Op, ctxt_bump_indent(Ctxt, 6)),$)],
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ [Txt,format_args(As, Ctxt1),
+ format_ret(Rs, Ctxt1)
+ ];
+format_1(#k_enter{op=Op,args=As}, Ctxt) ->
+ Txt = ["enter (",format(Op, ctxt_bump_indent(Ctxt, 7)),$)],
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ [Txt,format_args(As, Ctxt1)];
+format_1(#k_bif{op=Op,args=As,ret=Rs}, Ctxt) ->
+ Txt = ["bif (",format(Op, ctxt_bump_indent(Ctxt, 5)),$)],
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ [Txt,format_args(As, Ctxt1),
+ format_ret(Rs, Ctxt1)
+ ];
+format_1(#k_test{op=Op,args=As}, Ctxt) ->
+ Txt = ["test (",format(Op, ctxt_bump_indent(Ctxt, 6)),$)],
+ Ctxt1 = ctxt_bump_indent(Ctxt, 2),
+ [Txt,format_args(As, Ctxt1)];
+format_1(#k_put{arg=A,ret=Rs}, Ctxt) ->
+ [format(A, Ctxt),
+ format_ret(Rs, ctxt_bump_indent(Ctxt, 1))
+ ];
+format_1(#k_try{arg=A,vars=Vs,body=B,evars=Evs,handler=H,ret=Rs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["try",
+ nl_indent(Ctxt1),
+ format(A, Ctxt1),
+ nl_indent(Ctxt),
+ "of ",
+ format_hseq(Vs, ", ", ctxt_bump_indent(Ctxt, 3), fun format/2),
+ nl_indent(Ctxt1),
+ format(B, Ctxt1),
+ nl_indent(Ctxt),
+ "catch ",
+ format_hseq(Evs, ", ", ctxt_bump_indent(Ctxt, 6), fun format/2),
+ nl_indent(Ctxt1),
+ format(H, Ctxt1),
+ nl_indent(Ctxt),
+ "end",
+ format_ret(Rs, Ctxt1)
+ ];
+format_1(#k_catch{body=B,ret=Rs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["catch",
+ nl_indent(Ctxt1),
+ format(B, Ctxt1),
+ nl_indent(Ctxt),
+ "end",
+ format_ret(Rs, Ctxt1)
+ ];
+format_1(#k_receive{var=V,body=B,timeout=T,action=A,ret=Rs}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.item_indent),
+ ["receive ",
+ format(V, Ctxt),
+ nl_indent(Ctxt1),
+ format(B, Ctxt1),
+ nl_indent(Ctxt),
+ "after ",
+ format(T, ctxt_bump_indent(Ctxt, 6)),
+ " ->",
+ nl_indent(Ctxt1),
+ format(A, Ctxt1),
+ nl_indent(Ctxt),
+ "end",
+ format_ret(Rs, Ctxt1)
+ ];
+format_1(#k_receive_accept{}, _Ctxt) -> "receive_accept";
+format_1(#k_receive_next{}, _Ctxt) -> "receive_next";
+format_1(#k_break{args=As}, Ctxt) ->
+ ["<",
+ format_hseq(As, ",", ctxt_bump_indent(Ctxt, 1), fun format/2),
+ ">"
+ ];
+format_1(#k_return{args=As}, Ctxt) ->
+ ["<<",
+ format_hseq(As, ",", ctxt_bump_indent(Ctxt, 1), fun format/2),
+ ">>"
+ ];
+format_1(#k_fdef{func=F,arity=A,vars=Vs,body=B}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["fdef ",
+ format_fa_pair({F,A}, ctxt_bump_indent(Ctxt, 5)),
+ format_args(Vs, ctxt_bump_indent(Ctxt, 14)),
+ " =",
+ nl_indent(Ctxt1),
+ format(B, Ctxt1)
+ ];
+format_1(#k_mdef{name=N,exports=Es,attributes=As,body=B}, Ctxt) ->
+ ["module ",
+ format(#k_atom{val=N}, ctxt_bump_indent(Ctxt, 7)),
+ nl_indent(Ctxt),
+ "export [",
+ format_vseq(Es,
+ "", ",",
+ ctxt_bump_indent(Ctxt, 8),
+ fun format_fa_pair/2),
+ "]",
+ nl_indent(Ctxt),
+ "attributes [",
+ format_vseq(As,
+ "", ",",
+ ctxt_bump_indent(Ctxt, 12),
+ fun format_attribute/2),
+ "]",
+ nl_indent(Ctxt),
+ format_vseq(B,
+ "", "",
+ Ctxt,
+ fun format/2),
+ nl_indent(Ctxt)
+ | "end"
+ ];
+%% Internal sys_kernel structures.
+format_1(#iset{vars=Vs,arg=A,body=B}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["set <",
+ format_hseq(Vs, ", ", ctxt_bump_indent(Ctxt, 5), fun format/2),
+ "> =",
+ nl_indent(Ctxt1),
+ format(A, Ctxt1),
+ nl_indent(Ctxt),
+ "in "
+ | format(B, ctxt_bump_indent(Ctxt, 2))
+ ];
+format_1(#ifun{vars=Vs,body=B}, Ctxt) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt, Ctxt#ctxt.body_indent),
+ ["fun ",
+ format_args(Vs, ctxt_bump_indent(Ctxt, 4)),
+ " ->",
+ nl_indent(Ctxt1)
+ | format(B, Ctxt1)
+ ];
+format_1(Type, _Ctxt) ->
+ ["** Unsupported type: ",
+ io_lib:write(Type)
+ | " **"
+ ].
+
+%% format_ret([RetVar], Context) -> Txt.
+%% Format the return vars of kexpr.
+
+format_ret(Rs, Ctxt) ->
+ [" >> ",
+ "<",
+ format_hseq(Rs, ",", ctxt_bump_indent(Ctxt, 5), fun format/2),
+ ">"].
+
+%% format_args([Arg], Context) -> Txt.
+%% Format arguments.
+
+format_args(As, Ctxt) ->
+ [$(,format_hseq(As, ", ", ctxt_bump_indent(Ctxt, 1), fun format/2),$)].
+
+%% format_hseq([Thing], Separator, Context, Fun) -> Txt.
+%% Format a sequence horizontally.
+
+format_hseq([H], _Sep, Ctxt, Fun) ->
+ Fun(H, Ctxt);
+format_hseq([H|T], Sep, Ctxt, Fun) ->
+ Txt = [Fun(H, Ctxt)|Sep],
+ Ctxt1 = ctxt_bump_indent(Ctxt, width(Txt, Ctxt)),
+ [Txt|format_hseq(T, Sep, Ctxt1, Fun)];
+format_hseq([], _, _, _) -> "".
+
+%% format_vseq([Thing], LinePrefix, LineSuffix, Context, Fun) -> Txt.
+%% Format a sequence vertically.
+
+format_vseq([H], _Pre, _Suf, Ctxt, Fun) ->
+ Fun(H, Ctxt);
+format_vseq([H|T], Pre, Suf, Ctxt, Fun) ->
+ [Fun(H, Ctxt),Suf,nl_indent(Ctxt),Pre|
+ format_vseq(T, Pre, Suf, Ctxt, Fun)];
+format_vseq([], _, _, _, _) -> "".
+
+format_fa_pair({F,A}, _Ctxt) -> [core_atom(F),$/,integer_to_list(A)].
+
+%% format_attribute({Name,Val}, Context) -> Txt.
+
+format_attribute({Name,Val}, Ctxt) when list(Val) ->
+ Txt = format(#k_atom{val=Name}, Ctxt),
+ Ctxt1 = ctxt_bump_indent(Ctxt, width(Txt,Ctxt)+4),
+ [Txt," = ",
+ $[,format_vseq(Val, "", ",", Ctxt1,
+ fun (A, _C) -> io_lib:write(A) end),$]
+ ];
+format_attribute({Name,Val}, Ctxt) ->
+ Txt = format(#k_atom{val=Name}, Ctxt),
+ [Txt," = ",io_lib:write(Val)].
+
+format_list_tail(#k_nil{anno=[]}, _Ctxt) -> "]";
+format_list_tail(#k_cons{anno=[],hd=H,tl=T}, Ctxt) ->
+ Txt = [$,|format(H, Ctxt)],
+ Ctxt1 = ctxt_bump_indent(Ctxt, width(Txt, Ctxt)),
+ [Txt|format_list_tail(T, Ctxt1)];
+format_list_tail(Tail, Ctxt) ->
+ ["|",format(Tail, ctxt_bump_indent(Ctxt, 1)), "]"].
+
+format_bin_seg(#k_bin_end{anno=[]}, _Ctxt) -> "";
+format_bin_seg(#k_bin_seg{anno=[],next=N}=Seg, Ctxt) ->
+ Txt = [$,|format_bin_seg_1(Seg, Ctxt)],
+ [Txt|format_bin_seg(N, ctxt_bump_indent(Ctxt, width(Txt, Ctxt)))];
+format_bin_seg(Seg, Ctxt) ->
+ ["|",format(Seg, ctxt_bump_indent(Ctxt, 2))].
+
+format_bin_seg_1(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg}, Ctxt) ->
+ [format(Seg, Ctxt),
+ ":",format(S, Ctxt),"*",io_lib:write(U),
+ ":",io_lib:write(T),
+ lists:map(fun (F) -> [$-,io_lib:write(F)] end, Fs)
+ ].
+
+% format_bin_elements(#k_binary_cons{hd=H,tl=T,size=S,info=I}, Ctxt) ->
+% A = canno(T),
+% Fe = fun (Eh, Es, Ei, Ct) ->
+% [format(Eh, Ct),":",format(Es, Ct),"/",io_lib:write(Ei)]
+% end,
+% case T of
+% #k_zero_binary{} when A == [] ->
+% Fe(H, S, I, Ctxt);
+% #k_binary_cons{} when A == [] ->
+% Txt = [Fe(H, S, I, Ctxt)|","],
+% Ctxt1 = ctxt_bump_indent(Ctxt, width(Txt, Ctxt)),
+% [Txt|format_bin_elements(T, Ctxt1)];
+% _ ->
+% Txt = [Fe(H, S, I, Ctxt)|"|"],
+% [Txt|format(T, ctxt_bump_indent(Ctxt, width(Txt, Ctxt)))]
+% end.
+
+indent(Ctxt) -> indent(Ctxt#ctxt.indent, Ctxt).
+
+indent(N, _Ctxt) when N =< 0 -> "";
+indent(N, Ctxt) ->
+ T = Ctxt#ctxt.tab_width,
+ string:chars($\t, N div T, string:chars($\s, N rem T)).
+
+nl_indent(Ctxt) -> [$\n|indent(Ctxt)].
+
+
+unindent(T, Ctxt) ->
+ unindent(T, Ctxt#ctxt.indent, Ctxt, []).
+
+unindent(T, N, _Ctxt, C) when N =< 0 ->
+ [T|C];
+unindent([$\s|T], N, Ctxt, C) ->
+ unindent(T, N - 1, Ctxt, C);
+unindent([$\t|T], N, Ctxt, C) ->
+ Tab = Ctxt#ctxt.tab_width,
+ if N >= Tab ->
+ unindent(T, N - Tab, Ctxt, C);
+ true ->
+ unindent([string:chars($\s, Tab - N)|T], 0, Ctxt, C)
+ end;
+unindent([L|T], N, Ctxt, C) when list(L) ->
+ unindent(L, N, Ctxt, [T|C]);
+unindent([H|T], _N, _Ctxt, C) ->
+ [H|[T|C]];
+unindent([], N, Ctxt, [H|T]) ->
+ unindent(H, N, Ctxt, T);
+unindent([], _, _, []) -> [].
+
+
+width(Txt, Ctxt) ->
+ width(Txt, 0, Ctxt, []).
+
+width([$\t|T], A, Ctxt, C) ->
+ width(T, A + Ctxt#ctxt.tab_width, Ctxt, C);
+width([$\n|T], _A, Ctxt, C) ->
+ width(unindent([T|C], Ctxt), Ctxt);
+width([H|T], A, Ctxt, C) when list(H) ->
+ width(H, A, Ctxt, [T|C]);
+width([_|T], A, Ctxt, C) ->
+ width(T, A + 1, Ctxt, C);
+width([], A, Ctxt, [H|T]) ->
+ width(H, A, Ctxt, T);
+width([], A, _, []) -> A.
+
+ctxt_bump_indent(Ctxt, Dx) ->
+ Ctxt#ctxt{indent=Ctxt#ctxt.indent + Dx}.
+
+core_atom(A) -> io_lib:write_string(atom_to_list(A), $').
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.erl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.erl
new file mode 100644
index 0000000000..ff210d83f5
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.erl
@@ -0,0 +1,448 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_life.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+%% Purpose : Convert annotated kernel expressions to annotated beam format.
+
+%% This module creates beam format annotated with variable lifetime
+%% information. Each thing is given an index and for each variable we
+%% store the first and last index for its occurrence. The variable
+%% database, VDB, attached to each thing is only relevant internally
+%% for that thing.
+%%
+%% For nested things like matches the numbering continues locally and
+%% the VDB for that thing refers to the variable usage within that
+%% thing. Variables which live through such a thing are internally
+%% given a very large last index. Internally the indexes continue
+%% after the index of that thing. This creates no problems as the
+%% internal variable info never escapes and externally we only see
+%% variables which are alive both before and after.
+%%
+%% This means that variables never "escape" from a thing and the only
+%% way to get values from a thing is to "return" them, with 'break' or
+%% 'return'. Externally these values become the return values of the
+%% thing. This is no real limitation as most nested things have
+%% multiple threads so working out a common best variable usage is
+%% difficult.
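+%%
+%% As a rough illustration (added for clarity, not in the original text):
+%% a VDB is an ordered list of {VarName,FirstIndex,LastIndex} tuples, so
+%%
+%%   new_var(x, 1, [])        gives [{x,1,1}]
+%%   use_var(x, 4, [{x,1,1}]) gives [{x,1,4}]   %x live from op 1 to op 4
+%%
+%% and a variable that lives through a nested thing gets the "very large
+%% last index" 1000000 used elsewhere in this module.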
+
+-module(v3_life).
+
+-export([module/2]).
+
+-export([vdb_find/2]).
+
+-import(lists, [map/2,foldl/3]).
+-import(ordsets, [add_element/2,intersection/2,union/2,union/1]).
+
+-include("v3_kernel.hrl").
+-include("v3_life.hrl").
+
+%% These are not defined in v3_kernel.hrl.
+get_kanno(Kthing) -> element(2, Kthing).
+%%set_kanno(Kthing, Anno) -> setelement(2, Kthing, Anno).
+
+module(#k_mdef{name=M,exports=Es,attributes=As,body=Fs0}, Opts) ->
+ put(?MODULE, Opts),
+ Fs1 = map(fun function/1, Fs0),
+ erase(?MODULE),
+ {ok,{M,Es,As,Fs1}}.
+
+%% function(Kfunc) -> Func.
+
+function(#k_fdef{func=F,arity=Ar,vars=Vs,body=Kb}) ->
+ %%ok = io:fwrite("life ~w: ~p~n", [?LINE,{F,Ar}]),
+ As = var_list(Vs),
+ Vdb0 = foldl(fun ({var,N}, Vdb) -> new_var(N, 0, Vdb) end, [], As),
+ %% Force a top-level match!
+ B0 = case Kb of
+ #k_match{} -> Kb;
+ _ ->
+ Ka = get_kanno(Kb),
+ #k_match{anno=#k{us=Ka#k.us,ns=[],a=Ka#k.a},
+ vars=Vs,body=Kb,ret=[]}
+ end,
+ {B1,_,Vdb1} = body(B0, 1, Vdb0),
+ {function,F,Ar,As,B1,Vdb1}.
+
+%% body(Kbody, I, Vdb) -> {[Expr],MaxI,Vdb}.
+%% Handle a body, need special cases for transforming match_fails.
+%% We KNOW that they only occur last in a body.
+
+body(#k_seq{arg=#k_put{anno=Pa,arg=Arg,ret=[R]},
+ body=#k_enter{anno=Ea,op=#k_internal{name=match_fail,arity=1},
+ args=[R]}},
+ I, Vdb0) ->
+ Vdb1 = use_vars(Pa#k.us, I, Vdb0), %All used here
+ {[match_fail(Arg, I, Pa#k.a ++ Ea#k.a)],I,Vdb1};
+body(#k_enter{anno=Ea,op=#k_internal{name=match_fail,arity=1},args=[Arg]},
+ I, Vdb0) ->
+ Vdb1 = use_vars(Ea#k.us, I, Vdb0),
+ {[match_fail(Arg, I, Ea#k.a)],I,Vdb1};
+body(#k_seq{arg=Ke,body=Kb}, I, Vdb0) ->
+ %%ok = io:fwrite("life ~w:~p~n", [?LINE,{Ke,I,Vdb0}]),
+ A = get_kanno(Ke),
+ Vdb1 = use_vars(A#k.us, I, new_vars(A#k.ns, I, Vdb0)),
+ {Es,MaxI,Vdb2} = body(Kb, I+1, Vdb1),
+ E = expr(Ke, I, Vdb2),
+ {[E|Es],MaxI,Vdb2};
+body(Ke, I, Vdb0) ->
+ %%ok = io:fwrite("life ~w:~p~n", [?LINE,{Ke,I,Vdb0}]),
+ A = get_kanno(Ke),
+ Vdb1 = use_vars(A#k.us, I, new_vars(A#k.ns, I, Vdb0)),
+ E = expr(Ke, I, Vdb1),
+ {[E],I,Vdb1}.
+
+%% guard(Kguard, I, Vdb) -> Guard.
+
+guard(#k_try{anno=A,arg=Ts,vars=[#k_var{name=X}],body=#k_var{name=X},
+ handler=#k_atom{val=false},ret=Rs}, I, Vdb) ->
+ %% Lock variables that are alive before try and used afterwards.
+ %% Don't lock variables that are only used inside the try expression.
+ Pdb0 = vdb_sub(I, I+1, Vdb),
+ {T,MaxI,Pdb1} = guard_body(Ts, I+1, Pdb0),
+ Pdb2 = use_vars(A#k.ns, MaxI+1, Pdb1), %Save "return" values
+ #l{ke={protected,T,var_list(Rs)},i=I,a=A#k.a,vdb=Pdb2};
+guard(#k_seq{}=G, I, Vdb0) ->
+ {Es,_,Vdb1} = guard_body(G, I, Vdb0),
+ #l{ke={block,Es},i=I,vdb=Vdb1,a=[]};
+guard(G, I, Vdb) -> guard_expr(G, I, Vdb).
+
+%% guard_body(Kbody, I, Vdb) -> {[Expr],MaxI,Vdb}.
+
+guard_body(#k_seq{arg=Ke,body=Kb}, I, Vdb0) ->
+ A = get_kanno(Ke),
+ Vdb1 = use_vars(A#k.us, I, new_vars(A#k.ns, I, Vdb0)),
+ {Es,MaxI,Vdb2} = guard_body(Kb, I+1, Vdb1),
+ E = guard_expr(Ke, I, Vdb2),
+ {[E|Es],MaxI,Vdb2};
+guard_body(Ke, I, Vdb0) ->
+ A = get_kanno(Ke),
+ Vdb1 = use_vars(A#k.us, I, new_vars(A#k.ns, I, Vdb0)),
+ E = guard_expr(Ke, I, Vdb1),
+ {[E],I,Vdb1}.
+
+%% guard_expr(Call, I, Vdb) -> Expr
+
+guard_expr(#k_test{anno=A,op=Op,args=As}, I, _Vdb) ->
+ #l{ke={test,test_op(Op),atomic_list(As)},i=I,a=A#k.a};
+guard_expr(#k_bif{anno=A,op=Op,args=As,ret=Rs}, I, _Vdb) ->
+ #l{ke={bif,bif_op(Op),atomic_list(As),var_list(Rs)},i=I,a=A#k.a};
+guard_expr(#k_put{anno=A,arg=Arg,ret=Rs}, I, _Vdb) ->
+ #l{ke={set,var_list(Rs),literal(Arg)},i=I,a=A#k.a};
+guard_expr(#k_match{anno=A,body=Kb,ret=Rs}, I, Vdb) ->
+ %% Experimental support for andalso/orelse in guards.
+ %% Work out imported variables which need to be locked.
+ Mdb = vdb_sub(I, I+1, Vdb),
+ M = match(Kb, A#k.us, I+1, Mdb),
+ #l{ke={match,M,var_list(Rs)},i=I,vdb=use_vars(A#k.us, I+1, Mdb),a=A#k.a};
+guard_expr(G, I, Vdb) -> guard(G, I, Vdb).
+
+%% expr(Kexpr, I, Vdb) -> Expr.
+
+expr(#k_call{anno=A,op=Op,args=As,ret=Rs}, I, _Vdb) ->
+ #l{ke={call,call_op(Op),atomic_list(As),var_list(Rs)},i=I,a=A#k.a};
+expr(#k_enter{anno=A,op=Op,args=As}, I, _Vdb) ->
+ #l{ke={enter,call_op(Op),atomic_list(As)},i=I,a=A#k.a};
+expr(#k_bif{anno=A,op=Op,args=As,ret=Rs}, I, _Vdb) ->
+ Bif = k_bif(A, Op, As, Rs),
+ #l{ke=Bif,i=I,a=A#k.a};
+expr(#k_match{anno=A,body=Kb,ret=Rs}, I, Vdb) ->
+ %% Work out imported variables which need to be locked.
+ Mdb = vdb_sub(I, I+1, Vdb),
+ M = match(Kb, A#k.us, I+1, Mdb),
+ #l{ke={match,M,var_list(Rs)},i=I,vdb=use_vars(A#k.us, I+1, Mdb),a=A#k.a};
+expr(#k_try{anno=A,arg=Ka,vars=Vs,body=Kb,evars=Evs,handler=Kh,ret=Rs}, I, Vdb) ->
+ %% Lock variables that are alive before the catch and used afterwards.
+ %% Don't lock variables that are only used inside the try.
+ Tdb0 = vdb_sub(I, I+1, Vdb),
+ %% This is the tricky bit. Lock variables in Arg that are used in
+ %% the body and handler. Add try tag 'variable'.
+ Ab = get_kanno(Kb),
+ Ah = get_kanno(Kh),
+ Tdb1 = use_vars(Ab#k.us, I+3, use_vars(Ah#k.us, I+3, Tdb0)),
+ Tdb2 = vdb_sub(I, I+2, Tdb1),
+ Vnames = fun (Kvar) -> Kvar#k_var.name end, %Get the variable names
+ {Aes,_,Adb} = body(Ka, I+2, add_var({catch_tag,I+1}, I+1, 1000000, Tdb2)),
+ {Bes,_,Bdb} = body(Kb, I+4, new_vars(map(Vnames, Vs), I+3, Tdb2)),
+ {Hes,_,Hdb} = body(Kh, I+4, new_vars(map(Vnames, Evs), I+3, Tdb2)),
+ #l{ke={'try',#l{ke={block,Aes},i=I+1,vdb=Adb,a=[]},
+ var_list(Vs),#l{ke={block,Bes},i=I+3,vdb=Bdb,a=[]},
+ var_list(Evs),#l{ke={block,Hes},i=I+3,vdb=Hdb,a=[]},
+ var_list(Rs)},
+ i=I,vdb=Tdb1,a=A#k.a};
+expr(#k_catch{anno=A,body=Kb,ret=[R]}, I, Vdb) ->
+ %% Lock variables that are alive before the catch and used afterwards.
+ %% Don't lock variables that are only used inside the catch.
+ %% Add catch tag 'variable'.
+ Cdb0 = vdb_sub(I, I+1, Vdb),
+ {Es,_,Cdb1} = body(Kb, I+1, add_var({catch_tag,I}, I, 1000000, Cdb0)),
+ #l{ke={'catch',Es,variable(R)},i=I,vdb=Cdb1,a=A#k.a};
+expr(#k_receive{anno=A,var=V,body=Kb,timeout=T,action=Ka,ret=Rs}, I, Vdb) ->
+ %% Work out imported variables which need to be locked.
+ Rdb = vdb_sub(I, I+1, Vdb),
+ M = match(Kb, add_element(V#k_var.name, A#k.us), I+1,
+ new_var(V#k_var.name, I, Rdb)),
+ {Tes,_,Adb} = body(Ka, I+1, Rdb),
+ #l{ke={receive_loop,atomic_lit(T),variable(V),M,
+ #l{ke=Tes,i=I+1,vdb=Adb,a=[]},var_list(Rs)},
+ i=I,vdb=use_vars(A#k.us, I+1, Vdb),a=A#k.a};
+expr(#k_receive_accept{anno=A}, I, _Vdb) ->
+ #l{ke=receive_accept,i=I,a=A#k.a};
+expr(#k_receive_next{anno=A}, I, _Vdb) ->
+ #l{ke=receive_next,i=I,a=A#k.a};
+expr(#k_put{anno=A,arg=Arg,ret=Rs}, I, _Vdb) ->
+ #l{ke={set,var_list(Rs),literal(Arg)},i=I,a=A#k.a};
+expr(#k_break{anno=A,args=As}, I, _Vdb) ->
+ #l{ke={break,atomic_list(As)},i=I,a=A#k.a};
+expr(#k_return{anno=A,args=As}, I, _Vdb) ->
+ #l{ke={return,atomic_list(As)},i=I,a=A#k.a}.
+
+%% call_op(Op) -> Op.
+%% bif_op(Op) -> Op.
+%% test_op(Op) -> Op.
+%% Do any necessary name translations here to munge into beam format.
+
+call_op(#k_local{name=N}) -> N;
+call_op(#k_remote{mod=M,name=N}) -> {remote,atomic_lit(M),atomic_lit(N)};
+call_op(Other) -> variable(Other).
+
+bif_op(#k_remote{mod=#k_atom{val=erlang},name=#k_atom{val=N}}) -> N;
+bif_op(#k_internal{name=N}) -> N.
+
+test_op(#k_remote{mod=#k_atom{val=erlang},name=#k_atom{val=N}}) -> N.
+
+%% k_bif(Anno, Op, [Arg], [Ret]) -> Expr.
+%%  Build bifs, with special handling of some internal calls.
+
+k_bif(_A, #k_internal{name=dsetelement,arity=3}, As, []) ->
+ {bif,dsetelement,atomic_list(As),[]};
+k_bif(_A, #k_internal{name=make_fun},
+ [#k_atom{val=Fun},#k_int{val=Arity},
+ #k_int{val=Index},#k_int{val=Uniq}|Free],
+ Rs) ->
+ {bif,{make_fun,Fun,Arity,Index,Uniq},var_list(Free),var_list(Rs)};
+k_bif(_A, Op, As, Rs) ->
+ %% The general case.
+ {bif,bif_op(Op),atomic_list(As),var_list(Rs)}.
+
+%% match(Kexpr, [LockVar], I, Vdb) -> Expr.
+%% Convert match tree to old format.
+
+match(#k_alt{anno=A,first=Kf,then=Kt}, Ls, I, Vdb0) ->
+ Vdb1 = use_vars(union(A#k.us, Ls), I, Vdb0),
+ F = match(Kf, Ls, I+1, Vdb1),
+ T = match(Kt, Ls, I+1, Vdb1),
+ #l{ke={alt,F,T},i=I,vdb=Vdb1,a=A#k.a};
+match(#k_select{anno=A,var=V,types=Kts}, Ls0, I, Vdb0) ->
+ Ls1 = add_element(V#k_var.name, Ls0),
+ Vdb1 = use_vars(union(A#k.us, Ls1), I, Vdb0),
+ Ts = map(fun (Tc) -> type_clause(Tc, Ls1, I+1, Vdb1) end, Kts),
+ #l{ke={select,literal(V),Ts},i=I,vdb=Vdb1,a=A#k.a};
+match(#k_guard{anno=A,clauses=Kcs}, Ls, I, Vdb0) ->
+ Vdb1 = use_vars(union(A#k.us, Ls), I, Vdb0),
+ Cs = map(fun (G) -> guard_clause(G, Ls, I+1, Vdb1) end, Kcs),
+ #l{ke={guard,Cs},i=I,vdb=Vdb1,a=A#k.a};
+match(Other, Ls, I, Vdb0) ->
+ Vdb1 = use_vars(Ls, I, Vdb0),
+ {B,_,Vdb2} = body(Other, I+1, Vdb1),
+ #l{ke={block,B},i=I,vdb=Vdb2,a=[]}.
+
+type_clause(#k_type_clause{anno=A,type=T,values=Kvs}, Ls, I, Vdb0) ->
+ %%ok = io:format("life ~w: ~p~n", [?LINE,{T,Kvs}]),
+ Vdb1 = use_vars(union(A#k.us, Ls), I+1, Vdb0),
+ Vs = map(fun (Vc) -> val_clause(Vc, Ls, I+1, Vdb1) end, Kvs),
+ #l{ke={type_clause,type(T),Vs},i=I,vdb=Vdb1,a=A#k.a}.
+
+val_clause(#k_val_clause{anno=A,val=V,body=Kb}, Ls0, I, Vdb0) ->
+ {_Used,New} = match_pat_vars(V),
+ %% Not clear yet how Used should be used.
+ Bus = (get_kanno(Kb))#k.us,
+ %%ok = io:format("Ls0 = ~p, Used=~p\n New=~p, Bus=~p\n", [Ls0,Used,New,Bus]),
+ Ls1 = union(intersection(New, Bus), Ls0), %Lock for safety
+ Vdb1 = use_vars(union(A#k.us, Ls1), I+1, new_vars(New, I, Vdb0)),
+ B = match(Kb, Ls1, I+1, Vdb1),
+ #l{ke={val_clause,literal(V),B},i=I,vdb=use_vars(Bus, I+1, Vdb1),a=A#k.a}.
+
+guard_clause(#k_guard_clause{anno=A,guard=Kg,body=Kb}, Ls, I, Vdb0) ->
+ Vdb1 = use_vars(union(A#k.us, Ls), I+2, Vdb0),
+ Gdb = vdb_sub(I+1, I+2, Vdb1),
+ G = guard(Kg, I+1, Gdb),
+ B = match(Kb, Ls, I+2, Vdb1),
+ #l{ke={guard_clause,G,B},
+ i=I,vdb=use_vars((get_kanno(Kg))#k.us, I+2, Vdb1),
+ a=A#k.a}.
+
+%% match_fail(FailValue, I, Anno) -> Expr.
+%% Generate the correct match_fail instruction. N.B. there is no
+%% generic case for when the fail value has been created elsewhere.
+
+match_fail(#k_tuple{es=[#k_atom{val=function_clause}|As]}, I, A) ->
+ #l{ke={match_fail,{function_clause,literal_list(As)}},i=I,a=A};
+match_fail(#k_tuple{es=[#k_atom{val=badmatch},Val]}, I, A) ->
+ #l{ke={match_fail,{badmatch,literal(Val)}},i=I,a=A};
+match_fail(#k_tuple{es=[#k_atom{val=case_clause},Val]}, I, A) ->
+ #l{ke={match_fail,{case_clause,literal(Val)}},i=I,a=A};
+match_fail(#k_atom{val=if_clause}, I, A) ->
+ #l{ke={match_fail,if_clause},i=I,a=A};
+match_fail(#k_tuple{es=[#k_atom{val=try_clause},Val]}, I, A) ->
+ #l{ke={match_fail,{try_clause,literal(Val)}},i=I,a=A}.
+
+%% type(Ktype) -> Type.
+
+type(k_int) -> integer;
+type(k_char) -> integer; %Hhhmmm???
+type(k_float) -> float;
+type(k_atom) -> atom;
+type(k_nil) -> nil;
+type(k_cons) -> cons;
+type(k_tuple) -> tuple;
+type(k_binary) -> binary;
+type(k_bin_seg) -> bin_seg;
+type(k_bin_end) -> bin_end.
+
+%% variable(Klit) -> Lit.
+%% var_list([Klit]) -> [Lit].
+
+variable(#k_var{name=N}) -> {var,N}.
+
+var_list(Ks) -> map(fun variable/1, Ks).
+
+%% atomic_lit(Klit) -> Lit.
+%% atomic_list([Klit]) -> [Lit].
+
+atomic_lit(#k_var{name=N}) -> {var,N};
+atomic_lit(#k_int{val=I}) -> {integer,I};
+atomic_lit(#k_float{val=F}) -> {float,F};
+atomic_lit(#k_atom{val=N}) -> {atom,N};
+%%atomic_lit(#k_char{val=C}) -> {char,C};
+%%atomic_lit(#k_string{val=S}) -> {string,S};
+atomic_lit(#k_nil{}) -> nil.
+
+atomic_list(Ks) -> map(fun atomic_lit/1, Ks).
+
+%% literal(Klit) -> Lit.
+%% literal_list([Klit]) -> [Lit].
+
+literal(#k_var{name=N}) -> {var,N};
+literal(#k_int{val=I}) -> {integer,I};
+literal(#k_float{val=F}) -> {float,F};
+literal(#k_atom{val=N}) -> {atom,N};
+%%literal(#k_char{val=C}) -> {char,C};
+literal(#k_string{val=S}) -> {string,S};
+literal(#k_nil{}) -> nil;
+literal(#k_cons{hd=H,tl=T}) ->
+ {cons,[literal(H),literal(T)]};
+literal(#k_binary{segs=V}) ->
+ case proplists:get_bool(no_new_binaries, get(?MODULE)) of
+ true ->
+ {old_binary,literal(V)};
+ false ->
+ {binary,literal(V)}
+ end;
+literal(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}) ->
+ {bin_seg,literal(S),U,T,Fs,[literal(Seg),literal(N)]};
+literal(#k_bin_end{}) -> bin_end;
+literal(#k_tuple{es=Es}) ->
+ {tuple,literal_list(Es)}.
+
+literal_list(Ks) -> map(fun literal/1, Ks).
+
+%% match_pat_vars(Pattern) -> {[UsedVarName],[NewVarName]}.
+
+match_pat_vars(#k_var{name=N}) -> {[],[N]};
+match_pat_vars(#k_int{}) -> {[],[]};
+match_pat_vars(#k_float{}) -> {[],[]};
+match_pat_vars(#k_atom{}) -> {[],[]};
+%%match_pat_vars(#k_char{}) -> {[],[]};
+match_pat_vars(#k_string{}) -> {[],[]};
+match_pat_vars(#k_nil{}) -> {[],[]};
+match_pat_vars(#k_cons{hd=H,tl=T}) ->
+ match_pat_list_vars([H,T]);
+match_pat_vars(#k_binary{segs=V}) ->
+ match_pat_vars(V);
+match_pat_vars(#k_bin_seg{size=S,seg=Seg,next=N}) ->
+ {U1,New1} = match_pat_vars(Seg),
+ {U2,New2} = match_pat_vars(N),
+ {[],U3} = match_pat_vars(S),
+ {union([U1,U2,U3]),union(New1, New2)};
+match_pat_vars(#k_bin_end{}) -> {[],[]};
+match_pat_vars(#k_tuple{es=Es}) ->
+ match_pat_list_vars(Es).
+
+match_pat_list_vars(Ps) ->
+ foldl(fun (P, {Used0,New0}) ->
+ {Used,New} = match_pat_vars(P),
+ {union(Used0, Used),union(New0, New)} end,
+ {[],[]}, Ps).
+
+%% new_var(VarName, I, Vdb) -> Vdb.
+%% new_vars([VarName], I, Vdb) -> Vdb.
+%% use_var(VarName, I, Vdb) -> Vdb.
+%% use_vars([VarName], I, Vdb) -> Vdb.
+%% add_var(VarName, F, L, Vdb) -> Vdb.
+
+new_var(V, I, Vdb) ->
+ case vdb_find(V, Vdb) of
+ {V,F,L} when I < F -> vdb_store(V, I, L, Vdb);
+ {V,_,_} -> Vdb;
+ error -> vdb_store(V, I, I, Vdb)
+ end.
+
+new_vars(Vs, I, Vdb0) ->
+ foldl(fun (V, Vdb) -> new_var(V, I, Vdb) end, Vdb0, Vs).
+
+use_var(V, I, Vdb) ->
+ case vdb_find(V, Vdb) of
+ {V,F,L} when I > L -> vdb_store(V, F, I, Vdb);
+ {V,_,_} -> Vdb;
+ error -> vdb_store(V, I, I, Vdb)
+ end.
+
+use_vars(Vs, I, Vdb0) ->
+ foldl(fun (V, Vdb) -> use_var(V, I, Vdb) end, Vdb0, Vs).
+
+add_var(V, F, L, Vdb) ->
+ use_var(V, L, new_var(V, F, Vdb)).
+
+vdb_find(V, Vdb) ->
+    %% Performance note: Profiling shows that this function accounts for
+    %% a lot of the execution time when huge constant terms are built.
+ %% Using the BIF lists:keysearch/3 is a lot faster than the
+ %% original Erlang version.
+ case lists:keysearch(V, 1, Vdb) of
+ {value,Vd} -> Vd;
+ false -> error
+ end.
+
+%vdb_find(V, [{V1,F,L}=Vd|Vdb]) when V < V1 -> error;
+%vdb_find(V, [{V1,F,L}=Vd|Vdb]) when V == V1 -> Vd;
+%vdb_find(V, [{V1,F,L}=Vd|Vdb]) when V > V1 -> vdb_find(V, Vdb);
+%vdb_find(V, []) -> error.
+
+vdb_store(V, F, L, [{V1,_,_}=Vd|Vdb]) when V > V1 ->
+ [Vd|vdb_store(V, F, L, Vdb)];
+vdb_store(V, F, L, [{V1,_,_}=Vd|Vdb]) when V < V1 -> [{V,F,L},Vd|Vdb];
+vdb_store(V, F, L, [{_V1,_,_}|Vdb]) -> [{V,F,L}|Vdb]; %V == V1
+vdb_store(V, F, L, []) -> [{V,F,L}].
+
+%% vdb_sub(Min, Max, Vdb) -> Vdb.
+%% Extract variables which are used before and after Min. Lock
+%% variables alive after Max.
+
+vdb_sub(Min, Max, Vdb) ->
+ [ if L >= Max -> {V,F,1000000};
+ true -> Vd
+ end || {V,F,L}=Vd <- Vdb, F < Min, L >= Min ].
diff --git a/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.hrl b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.hrl
new file mode 100644
index 0000000000..95adcfcfd8
--- /dev/null
+++ b/lib/dialyzer/test/options1_tests_SUITE_data/src/compiler/v3_life.hrl
@@ -0,0 +1,25 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: v3_life.hrl,v 1.1 2008/12/17 09:53:43 mikpe Exp $
+%%
+%% This record contains variable life-time annotation for a
+%% kernel expression. Added by v3_life, used by v3_codegen.
+
+-record(l, {ke, %Kernel expression
+ i=0, %Op number
+ vdb=[], %Variable database
+ a}). %Core annotation
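+%% For illustration (not part of the original header): a 'put' of the
+%% integer 42 into variable X at operation number 3 would be represented
+%% by v3_life as something like
+%%
+%%   #l{ke={set,[{var,'X'}],{integer,42}}, i=3, vdb=[], a=[]}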
+
diff --git a/lib/dialyzer/test/options2_tests_SUITE.erl b/lib/dialyzer/test/options2_tests_SUITE.erl
new file mode 100644
index 0000000000..43b5207744
--- /dev/null
+++ b/lib/dialyzer/test/options2_tests_SUITE.erl
@@ -0,0 +1,52 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(options2_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([options2_tests_SUITE_consistency/1, kernel/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, [{defines,[{vsn,4}]},{warnings,[no_return]}]}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [options2_tests_SUITE_consistency,kernel].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+options2_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+kernel(Config) ->
+ case dialyze(Config, kernel) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/options2_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/options2_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..5db2e50d23
--- /dev/null
+++ b/lib/dialyzer/test/options2_tests_SUITE_data/dialyzer_options
@@ -0,0 +1 @@
+{dialyzer_options, [{defines, [{'vsn', 4}]}, {warnings, [no_return]}]}.
diff --git a/lib/dialyzer/test/options2_tests_SUITE_data/results/kernel b/lib/dialyzer/test/options2_tests_SUITE_data/results/kernel
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/options2_tests_SUITE_data/results/kernel
diff --git a/lib/dialyzer/test/options2_tests_SUITE_data/src/kernel/global.erl b/lib/dialyzer/test/options2_tests_SUITE_data/src/kernel/global.erl
new file mode 100644
index 0000000000..1f0e01d074
--- /dev/null
+++ b/lib/dialyzer/test/options2_tests_SUITE_data/src/kernel/global.erl
@@ -0,0 +1,1999 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: global.erl,v 1.4 2009/09/17 09:46:19 kostis Exp $
+%%
+-module(global).
+-behaviour(gen_server).
+
+%% A global register that allows the global registration of pids and
+%% names, and that dynamically keeps up to date with the entire network.
+%% global can operate in two modes; in a fully connected network, or
+%% in a non-fully connected network. In the latter case, the name
+%% registration mechanism won't work.
+%%
+
+%% External exports
+-export([start/0, start_link/0, stop/0, sync/0, sync/1,
+ safe_whereis_name/1, whereis_name/1, register_name/2, register_name/3,
+ register_name_external/2, register_name_external/3, unregister_name_external/1,
+ re_register_name/2, re_register_name/3,
+ unregister_name/1, registered_names/0, send/2, node_disconnected/1,
+ set_lock/1, set_lock/2, set_lock/3,
+ del_lock/1, del_lock/2,
+ trans/2, trans/3, trans/4,
+ random_exit_name/3, random_notify_name/3, notify_all_name/3, cnode/3]).
+
+%% Internal exports
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+ code_change/3, timer/2, sync_init/2, init_locker/5, resolve_it/4,
+ init_the_locker/1]).
+
+-export([info/0]).
+
+
+%-define(PRINT(X), erlang:display(X)).
+-define(PRINT(X), true).
+
+%-define(P2(X), erlang:display(X)).
+%-define(P2(X), erlang:display({cs(),X})).
+-define(P2(X), true).
+
+%-define(P1(X), erlang:display(X)).
+-define(P1(X), true).
+
+%-define(P(X), erlang:display(X)).
+-define(P(X), true).
+
+%-define(FORMAT(S, A), format(S, A)).
+-define(FORMAT(S, A), ok).
+
+%%% In certain places in the server, calling io:format hangs everything,
+%%% so we'd better use erlang:display/1.
+% format(S, A) ->
+% erlang:display({format, cs(), S, A}),
+% % io:format(S, A),
+% ok.
+
+% cs() ->
+% {Big, Small, Tiny} = now(),
+% (Small rem 100) * 100 + (Tiny div 10000).
+
+%% Some notes on the internal structure:
+%% One invariant is that the list of locker processes is keyed; i.e.,
+%% there is only one process per neighboring node.
+%% When an item has been stored in the process dictionary, it is not
+%% necessarily cleared when not in use anymore. In other words, it's
+%% not an error if there is already an item there when one is to be
+%% stored.
+
+
+%% This is the protocol version
+%% Vsn 1 is the original protocol.
+%% Vsn 2 is enhanced with code to take care of registration of names from
+%% non erlang nodes, e.g. c-nodes.
+%% Vsn 3 is enhanced with a tag in the synch messages to distinguish
+%% different synch sessions from each other, see OTP-2766.
+%% Note: This also requires that ticket OTP-2928 is fixed on the nodes
+%% running vsn 1 or 2, if such nodes are to coexist with vsn 3 nodes.
+%% Vsn 4 uses a single, permanent, locker process, but works like vsn 3
+%% when communicating with vsn 3 nodes.
+
+%% -define(vsn, 4). %% Now given in options
+
+%%-----------------------------------------------------------------
+%% connect_all = boolean() - true if we are supposed to set up a
+%% fully connected net
+%% known = [Node] - all nodes known to us
+%% synced = [Node] - all nodes that have the same names as us
+%% lockers = [{Node, MyLockerPid}] - the pid of the locker
+%% process for each Node
+%% syncers = [pid()] - all current syncers processes
+%% node_name = atom() - our node name (can change if distribution
+%% is started/stopped dynamically)
+%%
+%% In addition to these, we keep info about messages arrived in
+%% the process dictionary:
+%% {pre_connect, Node} = {Vsn, InitMsg} - init_connect msgs that
+%% arrived before nodeup
+%% {wait_lock, Node} = {exchange, NameList} | lock_is_set
+%% - see comment below (handle_cast)
+%% {save_ops, Node} = [operation()] - save the ops between
+%% exchange and resolved
+%% {prot_vsn, Node} = Vsn - the exchange protocol version
+%% {sync_tag_my, Node} = My tag, used at synchronization with Node
+%% {sync_tag_his, Node} = The Node's tag, used at synchronization
+%%-----------------------------------------------------------------
+-record(state, {connect_all, known = [], synced = [],
+ lockers = [], syncers = [], node_name = node(),
+ the_locker, the_deleter}).
+
+start() -> gen_server:start({local, global_name_server}, global, [], []).
+start_link() -> gen_server:start_link({local, global_name_server},global,[],[]).
+stop() -> gen_server:call(global_name_server, stop, infinity).
+
+sync() ->
+ case check_sync_nodes() of
+ {error, Error} ->
+ {error, Error};
+ SyncNodes ->
+ gen_server:call(global_name_server, {sync, SyncNodes}, infinity)
+ end.
+sync(Nodes) ->
+ case check_sync_nodes(Nodes) of
+ {error, Error} ->
+ {error, Error};
+ SyncNodes ->
+ gen_server:call(global_name_server, {sync, SyncNodes}, infinity)
+ end.
+
+
+send(Name, Msg) ->
+ case whereis_name(Name) of
+ Pid when pid(Pid) ->
+ Pid ! Msg,
+ Pid;
+ undefined ->
+ exit({badarg, {Name, Msg}})
+ end.
+
+%% See OTP-3737. (safe_whereis_name/1 is in fact not used anywhere in OTP.)
+whereis_name(Name) ->
+ where(Name).
+
+safe_whereis_name(Name) ->
+ gen_server:call(global_name_server, {whereis, Name}, infinity).
+
+
+node_disconnected(Node) ->
+ global_name_server ! {nodedown, Node}.
+
+
+%%-----------------------------------------------------------------
+%% Method = function(Name, Pid1, Pid2) -> Pid | Pid2 | none
+%% Method is called if a name conflict is detected when two nodes
+%% are connecting to each other. It is supposed to return one of
+%% the Pids or 'none'. If a pid is returned, that pid is
+%% registered as Name on all nodes. If 'none' is returned, the
+%% Name is unregistered on all nodes. If anything else is returned,
+%% the Name is unregistered as well.
+%% Method is called once at one of the nodes where the processes reside
+%% only. If different Methods are used for the same name, it is
+%% undefined which one of them is used.
+%% Method is blocking, i.e. when it is called, no calls to whereis/
+%% send are let through until it has returned.
+%%-----------------------------------------------------------------
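+%% Illustrative usage (added as an example, not from the original source):
+%%
+%%   yes = global:register_name(my_service, Pid,
+%%                              {global, random_exit_name})
+%%
+%% registers Pid under my_service on all known nodes; the default Method
+%% {global, random_exit_name} resolves a name clash by keeping one of the
+%% two pids and exiting the other.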
+register_name(Name, Pid) when pid(Pid) ->
+ register_name(Name, Pid, {global, random_exit_name}).
+register_name(Name, Pid, Method) when pid(Pid) ->
+ trans_all_known(fun(Nodes) ->
+ case where(Name) of
+ undefined ->
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {register, Name, Pid, Method}),
+ yes;
+ _Pid -> no
+ end
+ end).
+
+unregister_name(Name) ->
+ case where(Name) of
+ undefined ->
+ ok;
+ _ ->
+ trans_all_known(fun(Nodes) ->
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {unregister, Name}),
+ ok
+ end)
+ end.
+
+re_register_name(Name, Pid) when pid(Pid) ->
+ re_register_name(Name, Pid, {global, random_exit_name}).
+re_register_name(Name, Pid, Method) when pid(Pid) ->
+ trans_all_known(fun(Nodes) ->
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {register, Name, Pid, Method}),
+ yes
+ end).
+
+%% Returns all globally registered names
+registered_names() -> lists:map(fun({Name, _Pid, _Method}) -> Name end,
+ ets:tab2list(global_names)).
+
+%%-----------------------------------------------------------------
+%% An external node (i.e not an erlang node) (un)registers a name.
+%% If the registered Pid crashes the name is to be removed from global.
+%% If the external node crashes the name is to be removed from global.
+%% If the Erlang node which registers the name crashes, the name is also to be
+%% removed, because the registered process is no longer supervised
+%% (i.e. there is no link to the registered Pid).
+%%-----------------------------------------------------------------
+register_name_external(Name, Pid) when pid(Pid) ->
+ register_name_external(Name, Pid, {global, random_exit_name}).
+register_name_external(Name, Pid, Method) when pid(Pid) ->
+ trans_all_known(fun(Nodes) ->
+ case where(Name) of
+ undefined ->
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {register, Name, Pid, Method}),
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {register_ext, Name, Pid, node()}),
+ yes;
+ _Pid -> no
+ end
+ end).
+
+
+
+
+unregister_name_external(Name) ->
+ case where(Name) of
+ undefined ->
+ ok;
+ _ ->
+ trans_all_known(fun(Nodes) ->
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {unregister, Name}),
+ gen_server:multi_call(Nodes,
+ global_name_server,
+ {unregister_ext, Name}),
+ ok
+ end)
+ end.
+
+
+
+
+
+%%-----------------------------------------------------------------
+%% Args: Id = id()
+%% Nodes = [node()]
+%% id() = {ResourceId, LockRequesterId}
+%% Retries = infinity | int() > 0
+%% Purpose: Sets a lock on the specified nodes (or all nodes if
+%% none are specified) on ResourceId for LockRequesterId. If there
+%% already exists a lock on ResourceId for another owner
+%% than LockRequesterId, false is returned, otherwise true.
+%% Returns: boolean()
+%%-----------------------------------------------------------------
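+%% Illustrative usage sketch (do_work/0 is a hypothetical helper):
+%%
+%%   Id = {my_resource, self()},
+%%   case global:set_lock(Id, [node() | nodes()], 5) of
+%%       true  -> Res = do_work(), global:del_lock(Id), Res;
+%%       false -> {error, could_not_lock}
+%%   end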
+set_lock(Id) ->
+ set_lock(Id, [node() | nodes()], infinity, 1).
+set_lock(Id, Nodes) ->
+ set_lock(Id, Nodes, infinity, 1).
+set_lock(Id, Nodes, Retries) when Retries > 0 ->
+ set_lock(Id, Nodes, Retries, 1);
+set_lock(Id, Nodes, infinity) ->
+ set_lock(Id, Nodes, infinity, 1).
+set_lock(_Id, _Nodes, 0, _) -> false;
+set_lock({ResourceId, LockRequesterId}, Nodes, Retries, Times) ->
+ Id = {ResourceId, LockRequesterId},
+ Msg = {set_lock, Id},
+ {Replies, _} =
+ gen_server:multi_call(Nodes, global_name_server, Msg),
+ ?P2({set_lock, node(), self(), {ResourceId, LockRequesterId},
+ Nodes, Retries, Times, Replies, catch erlang:error(kaka)}),
+ ?P({set_lock, node(), ResourceId,
+ {LockRequesterId, node(LockRequesterId)}}),
+ case check_replies(Replies, Id, Nodes) of
+ true -> ?P({set_lock_true, node(), ResourceId}),
+ true;
+ false ->
+ random_sleep(Times),
+ set_lock(Id, Nodes, dec(Retries), Times+1);
+ N when integer(N) ->
+ ?P({sleeping, N}),
+ timer:sleep(N*500),
+ set_lock(Id, Nodes, Retries, Times);
+ Pid when pid(Pid) ->
+ ?P({waiting_for, Pid}),
+ Ref = erlang:monitor(process, Pid),
+ receive
+ {'DOWN', Ref, process, Pid, _Reason} ->
+ ?P({waited_for, Pid, _Reason}),
+ set_lock(Id, Nodes, Retries, Times)
+ end
+ end.
+
+check_replies([{_Node, true} | T], Id, Nodes) ->
+ check_replies(T, Id, Nodes);
+check_replies([{_Node, Status} | _T], Id, Nodes) ->
+ gen_server:multi_call(Nodes, global_name_server, {del_lock, Id}),
+ Status;
+check_replies([], _Id, _Nodes) ->
+ true.
+
+del_lock(Id) ->
+ del_lock(Id, [node() | nodes()]).
+del_lock({ResourceId, LockRequesterId}, Nodes) ->
+ Id = {ResourceId, LockRequesterId},
+ ?P2({del_lock, node(), self(), ResourceId, LockRequesterId, Nodes}),
+ gen_server:multi_call(Nodes, global_name_server, {del_lock, Id}),
+ true.
+
+%%-----------------------------------------------------------------
+%% Args: Id = id()
+%% Fun = fun() | {M,F}
+%% Nodes = [node()]
+%% Retries = infinity | int() > 0
+%% Purpose: Sets a lock on Id (as set_lock), and evaluates
+%% Res = Fun() on success.
+%% Returns: Res | aborted (note, if Retries is infinity, the
+%% transaction won't abort)
+%%-----------------------------------------------------------------
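+%% Illustrative usage (update_counter/0 is a hypothetical fun):
+%%
+%%   case global:trans({counter, self()}, fun update_counter/0,
+%%                     [node() | nodes()], 3) of
+%%       aborted -> {error, lock_not_acquired};
+%%       Res     -> {ok, Res}
+%%   end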
+trans(Id, Fun) -> trans(Id, Fun, [node() | nodes()], infinity).
+trans(Id, Fun, Nodes) -> trans(Id, Fun, Nodes, infinity).
+trans(_Id, _Fun, _Nodes, 0) -> aborted;
+trans(Id, Fun, Nodes, Retries) ->
+ case set_lock(Id, Nodes, Retries) of
+ true ->
+ case catch Fun() of
+ {'EXIT', R} ->
+ del_lock(Id, Nodes),
+ exit(R);
+ Res ->
+ del_lock(Id, Nodes),
+ Res
+ end;
+ false ->
+ aborted
+ end.
+
+%%% Similar to trans(Id, Fun), but always uses global's own lock,
+%%% on all nodes known to global, making sure that no new nodes have
+%%% become known while we got the list of known nodes.
+trans_all_known(F) ->
+ Id = {global, self()},
+ Nodes = [node() | gen_server:call(global_name_server, get_known)],
+ case set_lock(Id, Nodes) of
+ true ->
+ Nodes2 = [node() | gen_server:call(global_name_server, get_known)],
+ case Nodes2 -- Nodes of
+ [] ->
+ case catch F(Nodes2) of
+ {'EXIT', R} ->
+ del_lock(Id, Nodes2),
+ exit(R);
+ Res ->
+ del_lock(Id, Nodes2),
+ Res
+ end;
+ _ ->
+ del_lock(Id, Nodes),
+ trans_all_known(F)
+ end;
+ false ->
+ aborted
+ end.
+
+info() ->
+ gen_server:call(global_name_server, info).
+
+%%%-----------------------------------------------------------------
+%%% Call-back functions from gen_server
+%%%-----------------------------------------------------------------
+init([]) ->
+ process_flag(trap_exit, true),
+ ets:new(global_locks, [set, named_table, protected]),
+ ets:new(global_names, [set, named_table, protected]),
+ ets:new(global_names_ext, [set, named_table, protected]),
+
+ %% multi
+ S = #state{the_locker = start_the_locker(self()),
+ the_deleter = start_the_deleter(self())},
+
+ case init:get_argument(connect_all) of
+ {ok, [["false"]]} ->
+ {ok, S#state{connect_all = false}};
+ _ ->
+ {ok, S#state{connect_all = true}}
+ end.
+
+%%-----------------------------------------------------------------
+%% Connection algorithm
+%% ====================
+%% This algorithm solves the problem with partitioned nets as well.
+%%
+%% The main idea in the algorithm is that when two nodes connect, they
+%% try to set a lock in their own partition (i.e. all nodes already
+%% known to them). When the lock is set in each partition, these
+%% two nodes send each other a list with all registered names in
+%% respective partition(*). If no conflict is found, the name tables are
+%% just updated. If a conflict is found, a resolve function is
+%% called once for each conflict. The result of the resolving
+%% is sent to the other node. When the names are exchanged, all
+%% other nodes in each partition are informed of the other nodes,
+%% and they ping each other to form a fully connected net.
+%%
+%% Here's the flow:
+%% Suppose nodes A and B connect, and C is connected to A.
+%%
+%% Node A
+%% ------
+%% << {nodeup, B}
+%% [spawn locker]
+%% B ! {init_connect, MyLocker}
+%% << {init_connect, MyLocker}
+%% [The lockers try to set the lock]
+%% << {lock_is_set, B}
+%% [Now, lock is set in both partitions]
+%% B ! {exchange, Names}
+%% << {exchange, Names}
+%% [solve conflict]
+%% B ! {resolved, Resolved}
+%% << {resolved, Resolved}
+%% C ! {new_nodes, Resolved, [B]}
+%%
+%% Node C
+%% ------
+%% << {new_nodes, ResolvedOps, NewNodes}
+%% [insert Ops]
+%% ping(NewNodes)
+%% << {nodeup, B}
+%% <ignore this one>
+%%
+%% Several things can disturb this picture.
+%%
+%% First, the got_names message may arrive *before* the nodeup
+%% message, due to delay in net_kernel and an optimisation in the
+%% emulator. We handle this by keeping track of these messages in the
+%% pre_connect and lockers variables in our state.
+%%
+%% The most common situation is when a new node connects to an
+%% existing net. In this case there's no need to set the lock on
+%% all nodes in the net, as we know that there won't be any conflict.
+%% This is optimised by sending {first_contact, Node} instead of got_names.
+%% This implies that first_contact may arrive before nodeup as well.
+%%
+%% Of course we must handle that some node goes down during the
+%% connection.
+%%
+%% (*) When this information is being exchanged, no one is allowed
+%% to change the global register table. All calls to register etc
+%% are protected by a lock. If a registered process dies
+%% during this phase, the deregistration is done as soon as possible
+%% on each node (i.e. when the info about the process has arrived).
+%%-----------------------------------------------------------------
+%% Messages in the protocol
+%% ========================
+%% 1. Between connecting nodes (gen_server:casts)
+%% {init_connect, Vsn, Node, InitMsg}
+%% InitMsg = {locker, LockerPid}
+%% {exchange, Node, ListOfNames}
+%% {resolved, Node, Ops, Known}
+%% Known = list of nodes in Node's partition
+%% 2. Between lockers on connecting nodes (!s)
+%% {his_locker, Pid} (from our global)
+%% lockers link to each other
+%% {lock, Bool} loop until both lockers have lock = true,
+%% then send to global {lock_is_set, Node}
+%% 3. From connecting node to other nodes in the partition
+%% {new_nodes, Node, Ops, NewNodes}
+%% 4. sync protocol
+%% {in_sync, Node, IsKnown}
+%% - sent by each node to all new nodes
+%%-----------------------------------------------------------------
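+%% Illustrative note (not from the original source): the resolve
+%% functions mentioned above all follow the contract
+%%
+%%   Method(Name, Pid1, Pid2) -> pid() | none
+%%
+%% (see exchange_names/4 and resolve_it/4 below). A hypothetical custom
+%% resolver that always keeps the process on the smaller node could
+%% look like this:
+%%
+%%   keep_smaller_node(_Name, Pid1, Pid2) ->
+%%       case node(Pid1) < node(Pid2) of
+%%           true  -> Pid1;
+%%           false -> Pid2
+%%       end.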
+
+handle_call({whereis, Name}, From, S) ->
+ do_whereis(Name, From),
+ {noreply, S};
+
+handle_call({register, Name, Pid, Method}, _From, S) ->
+ ?P2({register, node(), Name}),
+ ins_name(Name, Pid, Method),
+ {reply, yes, S};
+
+handle_call({unregister, Name}, _From, S) ->
+ case ets:lookup(global_names, Name) of
+ [{_, Pid, _}] ->
+ ?P2({unregister, node(), Name, Pid, node(Pid)}),
+ ets:delete(global_names, Name),
+ dounlink(Pid);
+ _ -> ok
+ end,
+ {reply, ok, S};
+
+handle_call({register_ext, Name, Pid, RegNode}, _F, S) ->
+ ins_name_ext(Name, Pid, RegNode),
+ {reply, yes, S};
+
+handle_call({unregister_ext, Name}, _From, S) ->
+ ets:delete(global_names_ext, Name),
+ {reply, ok, S};
+
+
+handle_call({set_lock, Lock}, {Pid, _Tag}, S) ->
+ Reply = handle_set_lock(Lock, Pid),
+ {reply, Reply, S};
+
+handle_call({del_lock, Lock}, {Pid, _Tag}, S) ->
+ handle_del_lock(Lock, Pid),
+ {reply, true, S};
+
+handle_call(get_known, _From, S) ->
+ {reply, S#state.known, S};
+
+%% R7 may call us?
+handle_call(get_known_v2, _From, S) ->
+ {reply, S#state.known, S};
+
+handle_call({sync, Nodes}, From, S) ->
+ %% If we have several global groups, this won't work, since we will
+ %% do start_sync on a nonempty list of nodes even if the system
+ %% is quiet.
+ Pid = start_sync(lists:delete(node(), Nodes) -- S#state.synced, From),
+ {noreply, S#state{syncers = [Pid | S#state.syncers]}};
+
+handle_call(get_protocol_version, _From, S) ->
+ {reply, ?vsn, S};
+
+handle_call(get_names_ext, _From, S) ->
+ {reply, get_names_ext(), S};
+
+handle_call(info, _From, S) ->
+ {reply, S, S};
+
+handle_call(stop, _From, S) ->
+ {stop, normal, stopped, S}.
+
+
+%%=======================================================================================
+%% init_connect
+%%
+%% Vsn 1 is the original protocol.
+%% Vsn 2 is enhanced with code to take care of registration of names from
+%%       non-Erlang nodes, e.g. C nodes.
+%% Vsn 3 is enhanced with a tag in the synch messages to distinguish
+%% different synch sessions from each other, see OTP-2766.
+%%     Note: This also requires that ticket OTP-2928 is fixed on the nodes
+%%           running vsn 1 or 2, if such nodes are to coexist with vsn 3 nodes.
+%%=======================================================================================
+handle_cast({init_connect, Vsn, Node, InitMsg}, S) ->
+ ?FORMAT("~p #### init_connect Vsn ~p, Node ~p, InitMsg ~p~n",[node(), Vsn, Node, InitMsg]),
+ case Vsn of
+ %% It is always the responsibility of newer versions to understand
+ %% older versions of the protocol.
+ {HisVsn, HisTag} when HisVsn > ?vsn ->
+ init_connect(?vsn, Node, InitMsg, HisTag, S#state.lockers, S);
+ {HisVsn, HisTag} ->
+ init_connect(HisVsn, Node, InitMsg, HisTag, S#state.lockers, S);
+ %% To be future compatible
+ Tuple when tuple(Tuple) ->
+ List = tuple_to_list(Tuple),
+ [_HisVsn, HisTag | _] = List,
+ %% use own version handling if his is newer.
+ init_connect(?vsn, Node, InitMsg, HisTag, S#state.lockers, S);
+ _ when Vsn < 3 ->
+ init_connect(Vsn, Node, InitMsg, undef, S#state.lockers, S);
+ _ ->
+ Txt = io_lib:format("Illegal global protocol version ~p Node: ~p",[Vsn, Node]),
+ error_logger:info_report(lists:flatten(Txt))
+ end,
+ {noreply, S};
+
+%%=======================================================================================
+%% lock_is_set
+%%
+%% Ok, the lock is now set on both partitions. Send our names to other node.
+%%=======================================================================================
+handle_cast({lock_is_set, Node, MyTag}, S) ->
+ ?FORMAT("~p #### lock_is_set Node ~p~n",[node(), Node]),
+ Sync_tag_my = get({sync_tag_my, Node}),
+ PVsn = get({prot_vsn, Node}),
+ ?P2({lock_is_set, node(), Node, {MyTag, PVsn}, Sync_tag_my}),
+ case {MyTag, PVsn} of
+ {Sync_tag_my, undefined} ->
+            %% Patch for OTP-2728: the connection to the Node is flipping up and down;
+            %% messages from the 'older' sync tries can disturb the 'new' sync try,
+            %% therefore all messages are discarded if the protocol vsn is not defined.
+ Txt = io_lib:format("undefined global protocol version Node: ~p",[Node]),
+ error_logger:info_report(lists:flatten(Txt)),
+ {noreply, S};
+ {Sync_tag_my, _} ->
+ %% Check that the Node is still not known
+ case lists:member(Node, S#state.known) of
+ false ->
+ ?P2({lset, node(), Node, false}),
+ lock_is_set(Node, S#state.known),
+ {noreply, S};
+ true ->
+ ?P2({lset, node(), Node, true}),
+ erase({wait_lock, Node}),
+ NewS = cancel_locker(Node, S),
+ {noreply, NewS}
+ end;
+ _ ->
+ ?P2({lset, illegal, node(), Node}),
+ %% Illegal tag, delete the locker.
+ erase({wait_lock, Node}),
+ NewS = cancel_locker(Node, S),
+ {noreply, NewS}
+ end;
+
+%%=======================================================================================
+%% exchange
+%%
+%% Here the names are checked to detect name clashes.
+%%=======================================================================================
+%% Vsn 3 of the protocol
+handle_cast({exchange, Node, NameList, NameExtList, MyTag}, S) ->
+ ?FORMAT("~p #### handle_cast 3 lock_is_set exchange ~p~n",
+ [node(),{Node, NameList, NameExtList, MyTag}]),
+ Sync_tag_my = get({sync_tag_my, Node}),
+ PVsn = get({prot_vsn, Node}),
+ case {MyTag, PVsn} of
+ {Sync_tag_my, undefined} ->
+            %% Patch for OTP-2728: the connection to the Node is flipping up and down;
+            %% messages from the 'older' sync tries can disturb the 'new' sync try,
+            %% therefore all messages are discarded if the protocol vsn is not defined.
+ Txt = lists:flatten(io_lib:format(
+ "undefined global protocol version Node: ~p",[Node])),
+ error_logger:info_report(Txt),
+ {noreply, S};
+ {Sync_tag_my, _} ->
+ exchange(PVsn, Node, {NameList, NameExtList}, S#state.known),
+ {noreply, S};
+ _ ->
+ %% Illegal tag, delete the locker.
+ erase({wait_lock, Node}),
+ NewS = cancel_locker(Node, S),
+ {noreply, NewS}
+ end;
+
+
+
+%%=======================================================================================
+%% resolved
+%%
+%% Here the name clashes are resolved.
+%%=======================================================================================
+%% Vsn 3 of the protocol
+handle_cast({resolved, Node, Resolved, HisKnown, _HisKnown_v2, Names_ext, MyTag}, S) ->
+ ?FORMAT("~p #### 2 resolved ~p~n",[node(),{Node, Resolved, HisKnown, Names_ext}]),
+ Sync_tag_my = get({sync_tag_my, Node}),
+ PVsn = get({prot_vsn, Node}),
+ case {MyTag, PVsn} of
+ {Sync_tag_my, undefined} ->
+            %% Patch for OTP-2728: the connection to the Node is flipping up and down;
+            %% messages from the 'older' sync tries can disturb the 'new' sync try,
+            %% therefore all messages are discarded if the protocol vsn is not defined.
+ Txt = lists:flatten(io_lib:format(
+ "undefined global protocol version Node: ~p",[Node])),
+ error_logger:info_report(Txt),
+ {noreply, S};
+ {Sync_tag_my, _} ->
+ NewS = resolved(Node, Resolved, {HisKnown, HisKnown}, Names_ext, S),
+ {noreply, NewS};
+ _ ->
+ %% Illegal tag, delete the locker.
+ erase({wait_lock, Node}),
+ NewS = cancel_locker(Node, S),
+ {noreply, NewS}
+ end;
+
+
+
+
+
+
+%%=======================================================================================
+%% new_nodes
+%%
+%% We get to know the other node's known nodes.
+%%=======================================================================================
+%% Vsn 2 and 3 of the protocol
+handle_cast({new_nodes, _Node, Ops, Names_ext, Nodes, _Nodes_v2}, S) ->
+ ?P2({new_nodes, node(), Nodes}),
+ ?FORMAT("~p #### 2 new_nodes ~p~n",[node(),{Ops, Names_ext, Nodes}]),
+ NewS = new_nodes(Ops, Names_ext, Nodes, S),
+ {noreply, NewS};
+
+
+
+
+%%=======================================================================================
+%% in_sync
+%%
+%% We are in sync with this node (from the other node's known world).
+%%=======================================================================================
+handle_cast({in_sync, Node, IsKnown}, S) ->
+ ?FORMAT("~p #### in_sync ~p~n",[node(),{Node, IsKnown}]),
+ lists:foreach(fun(Pid) -> Pid ! {synced, [Node]} end, S#state.syncers),
+ %% moved up:
+ NewS = cancel_locker(Node, S),
+ erase({wait_lock, Node}),
+ erase({pre_connect, Node}),
+ erase({sync_tag_my, Node}),
+ erase({sync_tag_his, Node}),
+ NKnown = case lists:member(Node, Known = NewS#state.known) of
+ false when IsKnown == true ->
+ gen_server:cast({global_name_server, Node},
+ {in_sync, node(), false}),
+ [Node | Known];
+ _ ->
+ Known
+ end,
+ NSynced = case lists:member(Node, Synced = NewS#state.synced) of
+ true -> Synced;
+ false -> [Node | Synced]
+ end,
+ {noreply, NewS#state{known = NKnown, synced = NSynced}};
+
+
+
+
+%% Called when Pid on other node crashed
+handle_cast({async_del_name, Name, Pid}, S) ->
+ ?P2({async_del_name, node(), Name, Pid, node(Pid)}),
+ case ets:lookup(global_names, Name) of
+ [{Name, Pid, _}] ->
+ ets:delete(global_names, Name),
+ dounlink(Pid);
+ _ -> ok
+ end,
+ ets:delete(global_names_ext, Name),
+ {noreply, S};
+
+handle_cast({async_del_lock, _ResourceId, Pid}, S) ->
+ del_locks2(ets:tab2list(global_locks), Pid),
+% ets:match_delete(global_locks, {ResourceId, '_', Pid}),
+ {noreply, S}.
+
+
+handle_info({'EXIT', Deleter, _Reason}=Exit, #state{the_deleter=Deleter}=S) ->
+ {stop, {deleter_died,Exit}, S#state{the_deleter=undefined}};
+handle_info({'EXIT', Pid, _Reason}, #state{the_deleter=Deleter}=S)
+ when pid(Pid) ->
+ ?P2({global, exit, node(), Pid, node(Pid)}),
+ check_exit(Deleter, Pid),
+ Syncers = lists:delete(Pid, S#state.syncers),
+ Lockers = lists:keydelete(Pid, 2, S#state.lockers),
+ ?PRINT({exit, Pid, lockers, node(), S#state.lockers}),
+ {noreply, S#state{syncers = Syncers, lockers = Lockers}};
+
+handle_info({nodedown, Node}, S) when Node == S#state.node_name ->
+ %% Somebody stopped the distribution dynamically - change
+ %% references to old node name (Node) to new node name ('nonode@nohost')
+ {noreply, change_our_node_name(node(), S)};
+
+handle_info({nodedown, Node}, S) ->
+ ?FORMAT("~p #### nodedown 1 ####### Node ~p",[node(),Node]),
+ %% moved up:
+ do_node_down(Node),
+ #state{known = Known, synced = Syncs} = S,
+ NewS = cancel_locker(Node, S),
+
+ erase({wait_lock, Node}),
+ erase({save_ops, Node}),
+ erase({pre_connect, Node}),
+ erase({prot_vsn, Node}),
+ erase({sync_tag_my, Node}),
+ erase({sync_tag_his, Node}),
+ {noreply, NewS#state{known = lists:delete(Node, Known),
+ synced = lists:delete(Node, Syncs)}};
+
+
+
+handle_info({nodeup, Node}, S) when Node == node() ->
+ ?FORMAT("~p #### nodeup S ####### Node ~p~n",[node(), Node]),
+ %% Somebody started the distribution dynamically - change
+ %% references to old node name ('nonode@nohost') to Node.
+ {noreply, change_our_node_name(Node, S)};
+
+handle_info({nodeup, Node}, S) when S#state.connect_all == true ->
+ ?FORMAT("~p #### nodeup 1 ####### Node ~p",[node(),Node]),
+ IsKnown = lists:member(Node, S#state.known) or
+ %% This one is only for double nodeups (shouldn't occur!)
+ lists:keymember(Node, 1, S#state.lockers),
+ case IsKnown of
+ true ->
+ {noreply, S};
+ false ->
+            %% now() is used as a tag to separate different synch sessions
+            %% from each other. Global could be confused by bursty nodeups
+            %% because it couldn't separate the messages between the different
+            %% synch sessions started by a nodeup.
+ MyTag = now(),
+ resend_pre_connect(Node),
+
+ %% multi
+ S#state.the_locker ! {nodeup, Node, S#state.known, MyTag, self()},
+
+ Pid = start_locker(Node, S#state.known, MyTag, self(), S#state.the_locker),
+ Ls = S#state.lockers,
+ InitC = {init_connect, {?vsn, MyTag}, node(), {locker, Pid, S#state.known}},
+ ?P2({putting, MyTag}),
+ put({sync_tag_my, Node}, MyTag),
+ gen_server:cast({global_name_server, Node}, InitC),
+ {noreply, S#state{lockers = [{Node, Pid} | Ls]}}
+ end;
+
+
+%% This message is only used to test OTP-2766 (Global may be confused by bursty
+%% nodeups/nodedowns). It is a copy of the complex part of the handling of
+%% the 'nodeup' message.
+handle_info({test_vsn_tag_nodeup, Node}, S) when S#state.connect_all == true,
+ Node == node() ->
+ {noreply, S};
+handle_info({test_vsn_tag_nodeup, Node}, S) when S#state.connect_all == true ->
+ ?FORMAT("~p #### test_nodeup 1 ####### Node ~p~n",[node(), Node]),
+ MyTag = now(),
+ resend_pre_connect(Node),
+ S#state.the_locker ! {nodeup, Node, S#state.known, MyTag, self()},
+ Pid = start_locker(Node, S#state.known, MyTag, self(), S#state.the_locker),
+ Ls = S#state.lockers,
+ InitC = {init_connect, {?vsn, MyTag}, node(), {locker, Pid, S#state.known}},
+ put({sync_tag_my, Node}, MyTag),
+ gen_server:cast({global_name_server, Node}, InitC),
+ ?PRINT({lockers, node(), Ls}),
+ {noreply, S#state{lockers = [{Node, Pid} | Ls]}};
+
+
+handle_info({whereis, Name, From}, S) ->
+ do_whereis(Name, From),
+ {noreply, S};
+
+handle_info(known, S) ->
+ io:format(">>>> ~p~n",[S#state.known]),
+ {noreply, S};
+
+handle_info(_, S) ->
+ {noreply, S}.
+
+
+
+
+%%=======================================================================================
+%%=======================================================================================
+%%=============================== Internal Functions ====================================
+%%=======================================================================================
+%%=======================================================================================
+
+
+
+%%=======================================================================================
+%% Another node wants to synchronize its registered names with us.
+%% Start a locker process. Both nodes must have a lock before they are
+%% allowed to continue.
+%%=======================================================================================
+init_connect(Vsn, Node, InitMsg, HisTag, Lockers, S) ->
+ ?P2({init_connect, node(), Node}),
+ ?FORMAT("~p #### init_connect Vsn, Node, InitMsg ~p~n",[node(),{Vsn, Node, InitMsg}]),
+ %% It is always the responsibility of newer versions to understand
+ %% older versions of the protocol.
+ put({prot_vsn, Node}, Vsn),
+ put({sync_tag_his, Node}, HisTag),
+ if
+ Vsn =< 3 ->
+ case lists:keysearch(Node, 1, Lockers) of
+ {value, {_Node, MyLocker}} ->
+ %% We both have lockers; let them set the lock
+ case InitMsg of
+ {locker, HisLocker, HisKnown} -> %% current version
+ ?PRINT({init_connect1, node(), self(), Node,
+ MyLocker, HisLocker}),
+ MyLocker ! {his_locker, HisLocker, HisKnown};
+
+ {locker, _HisLocker, HisKnown, HisTheLocker} -> %% multi
+ ?PRINT({init_connect1, node(), self(), Node,
+ MyLocker, _HisLocker}),
+ S#state.the_locker ! {his_the_locker, HisTheLocker,
+ HisKnown, S#state.known}
+ end;
+ false ->
+ ?PRINT({init_connect11, node(), self(), Node}),
+ put({pre_connect, Node}, {Vsn, InitMsg, HisTag})
+ end;
+ true -> % Vsn > 3
+ ?P2(vsn4),
+ case lists:keysearch(Node, 1, Lockers) of
+ {value, {_Node, _MyLocker}} ->
+ %% We both have lockers; let them set the lock
+ case InitMsg of
+ {locker, HisLocker, HisKnown} -> %% current version
+ ?PRINT({init_connect1, node(), self(), Node,
+ _MyLocker, HisLocker}),
+ HisLocker ! {his_locker_new, S#state.the_locker,
+ {HisKnown, S#state.known}};
+
+ {locker, _HisLocker, HisKnown, HisTheLocker} -> %% multi
+ ?PRINT({init_connect1, node(), self(), Node,
+ _MyLocker, _HisLocker}),
+ S#state.the_locker ! {his_the_locker, HisTheLocker,
+ HisKnown, S#state.known}
+ end;
+ false ->
+ ?PRINT({init_connect11, node(), self(), Node}),
+ put({pre_connect, Node}, {Vsn, InitMsg, HisTag})
+ end
+ end.
+
+
+
+%%=======================================================================================
+%% In the simple case, we'll get lock_is_set before we get exchange,
+%% but we may get exchange before we get lock_is_set from our locker.
+%% If that's the case, we'll have to remember the exchange info, and
+%% handle it when we get the lock_is_set. We do this by using the
+%% process dictionary - when the lock_is_set msg is received, we store
+%% this info. When exchange is received, we can check the dictionary
+%% if the lock_is_set has been received. If not, we store info about
+%% the exchange instead. In the lock_is_set we must first check if
+%% exchange info is stored, in that case we take care of it.
+%%=======================================================================================
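+%% Illustrative summary of the two orderings (not part of the original
+%% comment), in terms of the {wait_lock, Node} dictionary entry:
+%%
+%%   lock_is_set arrives first:        exchange arrives first:
+%%     put(wait_lock, lock_is_set)       put(wait_lock, {exchange, ...})
+%%   exchange arrives:                 lock_is_set arrives:
+%%     sees lock_is_set and resolves     sees the saved exchange and resolves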
+lock_is_set(Node, Known) ->
+ ?FORMAT("~p #### lock_is_set ~p~n",[node(),{Node, Node, Known}]),
+ PVsn = get({prot_vsn, Node}),
+ case PVsn of
+ _ -> % 3 and higher
+ gen_server:cast({global_name_server, Node},
+ {exchange, node(), get_names(), get_names_ext(),
+ get({sync_tag_his, Node})})
+ end,
+ %% If both have the lock, continue with exchange
+ case get({wait_lock, Node}) of
+ {exchange, NameList, NameExtList} ->
+ %% vsn 2, 3
+ put({wait_lock, Node}, lock_is_set),
+ exchange(PVsn, Node, {NameList, NameExtList}, Known);
+ undefined ->
+ put({wait_lock, Node}, lock_is_set)
+ end.
+
+
+
+%%=======================================================================================
+%% exchange
+%%=======================================================================================
+%% Vsn 3 and higher of the protocol
+exchange(_Vsn, Node, {NameList, NameExtList}, Known) ->
+ ?FORMAT("~p #### 3 lock_is_set exchange ~p~n",[node(),{Node, NameList, NameExtList}]),
+ case erase({wait_lock, Node}) of
+ lock_is_set ->
+ {Ops, Resolved} = exchange_names(NameList, Node, [], []),
+ put({save_ops, Node}, Ops),
+ gen_server:cast({global_name_server, Node},
+ {resolved, node(), Resolved, Known,
+ Known, get_names_ext(), get({sync_tag_his, Node})});
+ undefined ->
+ put({wait_lock, Node}, {exchange, NameList, NameExtList})
+ end.
+
+
+
+
+
+resolved(Node, Resolved, {HisKnown, _HisKnown_v2}, Names_ext, S) ->
+ ?P2({resolved, node(), Node, S#state.known}),
+ ?FORMAT("~p #### 2 resolved ~p~n",[node(),{Node, Resolved, HisKnown, Names_ext}]),
+ erase({prot_vsn, Node}),
+ Ops = erase({save_ops, Node}) ++ Resolved,
+ Known = S#state.known,
+ Synced = S#state.synced,
+ NewNodes = [Node | HisKnown],
+ do_ops(Ops),
+ do_ops_ext(Ops,Names_ext),
+ gen_server:abcast(Known, global_name_server,
+ {new_nodes, node(), Ops, Names_ext, NewNodes, NewNodes}),
+ %% I am synced with Node, but not with HisKnown yet
+ lists:foreach(fun(Pid) -> Pid ! {synced, [Node]} end, S#state.syncers),
+ gen_server:abcast(HisKnown, global_name_server, {in_sync, node(), true}),
+ NewS = lists:foldl(fun(Node1, S1) -> cancel_locker(Node1, S1) end,
+ S,
+ NewNodes),
+ %% See (*) below... we're node b in that description
+ NewKnown = Known ++ (NewNodes -- Known),
+ NewS#state{known = NewKnown, synced = [Node | Synced]}.
+
+
+
+
+new_nodes(Ops, Names_ext, Nodes, S) ->
+ ?FORMAT("~p #### 2 new_nodes ~p~n",[node(),{Ops, Names_ext, Nodes}]),
+ do_ops(Ops),
+ do_ops_ext(Ops,Names_ext),
+ Known = S#state.known,
+ %% (*) This one requires some thought...
+ %% We're node a, other nodes b and c:
+ %% The problem is that {in_sync, a} may arrive before {resolved, [a]} to
+ %% b from c, leading to b sending {new_nodes, [a]} to us (node a).
+ %% Therefore, we make sure we never get duplicates in Known.
+ NewNodes = lists:delete(node(), Nodes -- Known),
+ gen_server:abcast(NewNodes, global_name_server, {in_sync, node(), true}),
+ S#state{known = Known ++ NewNodes}.
+
+
+
+
+
+do_whereis(Name, From) ->
+ case is_lock_set(global) of
+ false ->
+ gen_server:reply(From, where(Name));
+ true ->
+ send_again({whereis, Name, From})
+ end.
+
+terminate(_Reason, _S) ->
+ ets:delete(global_names),
+ ets:delete(global_names_ext),
+ ets:delete(global_locks).
+
+code_change(_OldVsn, S, _Extra) ->
+ {ok, S}.
+
+%% Resend init_connect to ourselves.
+resend_pre_connect(Node) ->
+ case erase({pre_connect, Node}) of
+% {Vsn, InitMsg, undef} ->
+% %% Vsn 1 & 2
+% ?PRINT({resend_pre_connect2, node(), self(), Node}),
+% gen_server:cast(self(), {init_connect, Vsn, Node, InitMsg});
+ {Vsn, InitMsg, HisTag} ->
+ %% Vsn 3
+ ?PRINT({resend_pre_connect3, node(), self(), Node}),
+ gen_server:cast(self(), {init_connect, {Vsn, HisTag}, Node, InitMsg});
+ _ ->
+ ?PRINT({resend_pre_connect0, node(), self(), Node}),
+ ok
+ end.
+
+ins_name(Name, Pid, Method) ->
+ case ets:lookup(global_names, Name) of
+ [{Name, Pid2, _}] ->
+ dounlink(Pid2);
+ [] ->
+ ok
+ end,
+ dolink(Pid),
+ ets:insert(global_names, {Name, Pid, Method}).
+
+ins_name_ext(Name, Pid, RegNode) ->
+ case ets:lookup(global_names_ext, Name) of
+ [{Name, Pid2, _}] ->
+ dounlink(Pid2);
+ [] ->
+ ok
+ end,
+ dolink_ext(Pid, RegNode),
+ ets:insert(global_names_ext, {Name, Pid, RegNode}).
+
+where(Name) ->
+ case ets:lookup(global_names, Name) of
+ [{_, Pid, _}] -> Pid;
+ [] -> undefined
+ end.
+
+handle_set_lock({ResourceId, LockRequesterId}, Pid) ->
+ case ets:lookup(global_locks, ResourceId) of
+ [{ResourceId, LockRequesterId, Pids}] ->
+ case lists:member(Pid, Pids) of
+ true ->
+ true;
+ false ->
+ dolink(Pid),
+ ets:insert(global_locks, {ResourceId, LockRequesterId, [Pid | Pids]}),
+ true
+ end;
+ [{ResourceId, _LockRequesterId2, _Pid2}] ->
+ case ResourceId of
+ global ->
+ ?P({before,
+ LockRequesterId,
+ _LockRequesterId2,
+ S#state.lockers}),
+ false;
+ _ ->
+ false
+ end;
+ [] ->
+ dolink(Pid),
+ ets:insert(global_locks, {ResourceId, LockRequesterId, [Pid]}),
+ true
+ end.
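+
+%% Illustrative example (not from the original source; the names my_res,
+%% ReqId, P1 and P2 are made up): if two local processes P1 and P2 both
+%% set the lock {my_res, ReqId}, the global_locks table ends up with a
+%% single entry {my_res, ReqId, [P2, P1]}; a later set_lock on my_res
+%% with a different LockRequesterId is refused (handle_set_lock returns
+%% false).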
+
+is_lock_set(ResourceId) ->
+ case ets:lookup(global_locks, ResourceId) of
+ [_Lock] -> true;
+ [] -> false
+ end.
+
+handle_del_lock({ResourceId, LockRequesterId}, Pid) ->
+ case ets:lookup(global_locks, ResourceId) of
+ [{ResourceId, LockRequesterId, Pids}] when [Pid] == Pids ->
+ ets:delete(global_locks, ResourceId),
+ dounlink(Pid);
+ [{ResourceId, LockRequesterId, Pids}] ->
+ NewPids = lists:delete(Pid, Pids),
+ ets:insert(global_locks, {ResourceId, LockRequesterId, NewPids}),
+ dounlink(Pid);
+ _ -> ok
+ end.
+
+do_ops(Ops) ->
+ lists:foreach(fun({insert, Item}) -> ets:insert(global_names, Item);
+ ({delete, Name}) ->
+ case ets:lookup(global_names, Name) of
+ [{Name, Pid, _}] ->
+ ?P2({do_ops_delete, node(), Name, Pid, node(Pid)}),
+ ets:delete(global_names, Name),
+ dounlink(Pid);
+ [] ->
+ ok
+ end
+ end, Ops).
+
+%% If a name is inserted, check whether it is an external name.
+%% If a name is deleted, it is always deleted from global_names_ext as well.
+do_ops_ext(Ops, Names_ext) ->
+ lists:foreach(fun({insert, {Name, Pid, _Method}}) ->
+ case lists:keysearch(Name, 1, Names_ext) of
+ {value, {Name, Pid, RegNode}} ->
+ ets:insert(global_names_ext, {Name, Pid, RegNode});
+ _ ->
+ ok
+ end;
+ ({delete, Name}) ->
+ ets:delete(global_names_ext, Name)
+ end, Ops).
+
+%%-----------------------------------------------------------------
+%% A locker is a process spawned by global_name_server when a
+%% nodeup is received from a new node. Its purpose is to try to
+%% set a lock in our partition, i.e. on all nodes known to us.
+%% When the lock is set, it tells global about it, and keeps
+%% the lock set. global sends a cancel message to the locker when
+%% the partitions are connected.
+
+%% Versions: at version 2, the messages exchanged between the lockers
+%% include the known nodes (see OTP-3576). There is no way of knowing
+%% the version number of the other side's locker when sending a message
+%% to it, so we send both version 1 and 2, and flush the version 1 if
+%% we receive version 2.
+%%
+%% Due to a mistake, an intermediate version of the new locking protocol
+%% (using 3-tuples) went out in R7, which only understands itself. This patch
+%% to R7 handles all kinds, which means sending all, and flushing the ones we
+%% don't want. (It will remain difficult to make a future version of the
+%% protocol communicate with this one.)
+%%
+%%-----------------------------------------------------------------
+%% (Version 2 in patched R7. No named version in R6 and older - let's call that
+%% version 1.)
+-define(locker_vsn, 2).
+
+%%% multi
+
+-record(multi, {known, others = []}).
+
+start_the_locker(Global) ->
+ spawn_link(?MODULE, init_the_locker, [Global]).
+
+%init_the_locker(Global) ->
+% ok;
+init_the_locker(Global) ->
+ process_flag(trap_exit, true), %needed?
+ loop_the_locker(Global, #multi{}),
+ erlang:error(locker_exited).
+
+remove_node(_Node, []) ->
+ [];
+remove_node(Node, [{Node, _HisTheLocker, _HisKnown, _MyTag} | Rest]) ->
+ Rest;
+remove_node(Node, [E | Rest]) ->
+ [E | remove_node(Node, Rest)].
+
+find_node_tag(_Node, []) ->
+ false;
+find_node_tag(Node, [{Node, _HisTheLocker, _HisKnown, MyTag} | _Rest]) ->
+ {true, MyTag};
+find_node_tag(Node, [_E | Rest]) ->
+ find_node_tag(Node, Rest).
+
+loop_the_locker(Global, S) ->
+ ?P2({others, node(), S#multi.others}),
+% Known = S#multi.known,
+ Timeout = case S#multi.others of
+ [] ->
+ infinity;
+ _ ->
+ 0
+ end,
+ receive
+% {nodeup, Node, Known, Tag, P} ->
+% ?P2({the_locker, nodeup, time(), node(), nodeup, Node, Tag}),
+% loop_the_locker(Global, S);
+ {his_the_locker, HisTheLocker, HisKnown, MyKnown} ->
+ ?P2({his_the_locker, time(), node(), HisTheLocker,
+ node(HisTheLocker)}),
+ receive
+ {nodeup, Node, _Known, MyTag, _P} when node(HisTheLocker) == Node ->
+ ?P2({the_locker, nodeup, node(), Node,
+ node(HisTheLocker), MyTag,
+ process_info(self(), messages)}),
+ Others = S#multi.others,
+ loop_the_locker(Global,
+ S#multi{known=MyKnown,
+ others=[{node(HisTheLocker), HisTheLocker, HisKnown, MyTag} | Others]});
+ {cancel, Node, _Tag} when node(HisTheLocker) == Node ->
+ loop_the_locker(Global, S)
+ after 60000 ->
+ ?P2({nodeupnevercame, node(), node(HisTheLocker)}),
+ error_logger:error_msg("global: nodeup never came ~w ~w~n",
+ [node(), node(HisTheLocker)]),
+ loop_the_locker(Global, S)
+ end;
+ {cancel, Node, undefined} ->
+ ?P2({the_locker, cancel1, undefined, node(), Node}),
+%% If we actually cancel something when a cancel message with the tag
+%% 'undefined' arrives, we may be acting on an old nodedown, to cancel
+%% a new nodeup, so we can't do that.
+% receive
+% {nodeup, Node, _Known, _MyTag, _P} ->
+% ?P2({the_locker, cancelnodeup1, node(), Node}),
+% ok
+% after 0 ->
+% ok
+% end,
+% Others = remove_node(Node, S#multi.others),
+% loop_the_locker(Global, S#multi{others = Others});
+ loop_the_locker(Global, S);
+ {cancel, Node, Tag} ->
+ ?P2({the_locker, cancel1, Tag, node(), Node}),
+ receive
+ {nodeup, Node, _Known, Tag, _P} ->
+ ?P2({the_locker, cancelnodeup2, node(), Node}),
+ ok
+ after 0 ->
+ ok
+ end,
+ Others = remove_node(Node, S#multi.others),
+ loop_the_locker(Global, S#multi{others = Others});
+ {lock_set, _Pid, false, _} ->
+ ?P2({the_locker, spurious, node(), node(_Pid)}),
+ loop_the_locker(Global, S);
+ {lock_set, Pid, true, HisKnown} ->
+ Node = node(Pid),
+ ?P2({the_locker, spontaneous, node(), Node}),
+
+ NewKnown = gen_server:call(global_name_server, get_known),
+
+ Others =
+ case find_node_tag(Node, S#multi.others) of
+ {true, MyTag} ->
+
+ BothsKnown = HisKnown -- (HisKnown -- NewKnown),
+ Known1 = if
+ node() < Node ->
+ [node() | NewKnown];
+ true ->
+ [node() | NewKnown] -- BothsKnown
+ end,
+
+ ?P2({lock1, node()}),
+ LockId = {global, self()},
+ IsLockSet = set_lock(LockId, Known1, 1),
+ Pid ! {lock_set, self(), IsLockSet, NewKnown},
+ ?P2({the_locker, spontaneous, node(), Node, IsLockSet}),
+ case IsLockSet of
+ true ->
+ gen_server:cast(global_name_server,
+ {lock_is_set, Node, MyTag}),
+ ?P1({lock_sync_done, time(), node(),
+ {Pid, node(Pid)}, self()}),
+ %% Wait for global to tell us to remove lock.
+ receive
+ {cancel, Node, _Tag} ->
+ %% All conflicts are resolved,
+ %% remove lock.
+ ?PRINT({node(), self(), locked1}),
+ del_lock(LockId, Known1);
+ {'EXIT', Pid, _} ->
+ ?PRINT({node(), self(), locked2}),
+ %% Other node died;
+ %% remove lock and ignore him.
+ del_lock(LockId, Known1),
+ link(Global)
+ end,
+ remove_node(Node, S#multi.others);
+ false ->
+ S#multi.others
+ end;
+ false ->
+ ?P2({the_locker, spontaneous, node(), Node, not_there}),
+ Pid ! {lock_set, self(), false, NewKnown},
+ S#multi.others
+ end,
+ loop_the_locker(Global, S#multi{others = Others});
+ Other when element(1, Other) /= nodeup ->
+ ?P2({the_locker, other_msg, Other}),
+ loop_the_locker(Global, S)
+ after Timeout ->
+ NewKnown = gen_server:call(global_name_server, get_known),
+ [{Node, HisTheLocker, HisKnown, MyTag} | Rest] = S#multi.others,
+ BothsKnown = HisKnown -- (HisKnown -- NewKnown),
+ Known1 = if
+ node() < Node ->
+ [node() | NewKnown];
+ true ->
+ [node() | NewKnown] -- BothsKnown
+ end,
+ ?P2({picking, node(), Node}),
+ case lists:member(Node, NewKnown) of
+ false ->
+ LockId = {global, self()},
+ ?P2({lock2, node()}),
+ IsLockSet = set_lock(LockId, Known1, 1),
+ Others =
+ case IsLockSet of
+ true ->
+ HisTheLocker ! {lock_set, self(),
+ IsLockSet, NewKnown},
+ %% OTP-4902
+ lock_set_loop(Global, S,
+ Node, MyTag, Rest,
+ Known1,
+ LockId);
+ false ->
+ ?P2({the_locker, not_locked, node(),
+ Node}),
+ S#multi.others
+ end,
+ loop_the_locker(Global, S#multi{known=NewKnown,
+ others = Others});
+ true ->
+ ?P2({is_known, node(), Node}),
+ loop_the_locker(Global, S#multi{known=NewKnown,
+ others = Rest})
+ end
+ end.
+
+lock_set_loop(Global, S, Node, MyTag, Rest, Known1, LockId) ->
+ receive
+ {lock_set, P, true, _} when node(P) == Node ->
+ ?P2({the_locker, both_set, node(), Node}),
+
+ %% do sync
+ gen_server:cast(global_name_server, {lock_is_set, Node, MyTag}),
+ ?P1({lock_sync_done, time(), node(), {Pid, node(Pid)}, self()}),
+
+ %% Wait for global to tell us to remove lock.
+ receive
+ {cancel, Node, _} ->
+ %% All conflicts are resolved, remove lock.
+ ?PRINT({node(), self(), locked1}),
+ del_lock(LockId, Known1);
+ {'EXIT', _Pid, _} ->
+ ?PRINT({node(), self(), locked2}),
+ %% Other node died; remove lock and ignore him.
+ del_lock(LockId, Known1),
+ link(Global)
+ end,
+ Rest;
+ {lock_set, P, false, _} when node(P) == Node ->
+ ?P2({the_locker, not_both_set, node(), Node}),
+ del_lock(LockId, Known1),
+ S#multi.others;
+ {cancel, Node, _} ->
+ ?P2({the_locker, cancel2, node(), Node}),
+ del_lock(LockId, Known1),
+ remove_node(Node, S#multi.others);
+ {'EXIT', _, _} ->
+ ?P2({the_locker, exit, node(), Node}),
+ del_lock(LockId, Known1),
+ S#multi.others
+
+ after
+ %% OTP-4902
+            %% A cyclic deadlock could occur in rare cases where three or
+            %% more nodes wait for a reply from each other.
+            %% Therefore, reject lock_set attempts in this state from
+            %% nodes < this node (it's enough if at least one node in
+            %% the cycle rejects and thus breaks the deadlock).
+ 5000 ->
+ reject_lock_set(),
+ lock_set_loop(Global, S, Node, MyTag, Rest, Known1, LockId)
+ end.
+
+reject_lock_set() ->
+ receive
+ {lock_set, P, true, _} when node(P) < node() ->
+ P ! {lock_set, self(), false, []},
+ reject_lock_set()
+ after
+ 0 ->
+ true
+ end.
+
+start_locker(Node, Known, MyTag, Global, TheLocker) ->
+ %% No link here! The del_lock call would delete the link anyway.
+ %% global_name_server has control of these processes anyway...
+ %% When the locker process exits due to being sent the 'cancel' message
+ %% by the server, the server then removes it from its tables.
+ %% When the locker terminates due to other reasons, the server must
+ %% be told, so we make a link to it just before exiting.
+ spawn(?MODULE, init_locker, [Node, Known, MyTag, Global, TheLocker]).
+
+init_locker(Node, Known, MyTag, Global, TheLocker) ->
+ process_flag(trap_exit, true),
+ ?PRINT({init_locker, node(), self(), Node}),
+ ?P1({init_locker, time(), node(), self(), Node}),
+ receive
+ {his_locker, Pid, HisKnown} ->
+ ?PRINT({init_locker, node(), self(), his_locker, Node}),
+ link(Pid),
+            %% If two nodes in a group of nodes first disconnect
+            %% and then reconnect, this causes global to deadlock.
+            %% This is because both of the reconnecting nodes
+            %% try to set a lock on the other nodes in the group.
+            %% This is solved by letting only one of the reconnecting nodes set the lock.
+ BothsKnown = HisKnown -- (HisKnown -- Known),
+ ?P({loop_locker1, node(), {Pid, node(Pid)}}),
+ Res = loop_locker(Node, Pid, Known, 1, MyTag, BothsKnown, Global),
+ ?P({loop_locker2, node(), {Pid, node(Pid)}}),
+ Res;
+ {his_locker_new, HisTheLocker, {Known1, Known2}} ->
+ %% slide into the vsn 4 stuff
+ ?P2({his_locker_new, node()}),
+ HisTheLocker ! {his_the_locker, TheLocker, Known1, Known2},
+ exit(normal);
+ cancel ->
+ ?PRINT({init_locker, node(), self(), cancel, Node}),
+ exit(normal)
+ end.
+
+loop_locker(Node, Pid, Known0, Try, MyTag, BothsKnown, Global) ->
+ Known = if
+ node() < Node ->
+ [node() | Known0];
+ true ->
+ [node() | Known0] -- BothsKnown
+ end,
+
+ ?PRINT({locking, node(), self(), Known}),
+ LockId = {global, self()},
+ ?P2({lock3, node()}),
+ IsLockSet = set_lock(LockId, Known, 1),
+ ?P({loop_locker, IsLockSet,
+ node(), {Pid, node(Pid)}, self(), Try}),
+ ?P1({loop_locker, time(), IsLockSet,
+ node(), {Pid, node(Pid)}, self(), Try}),
+ ?PRINT({locking1, node(), self(), Known, IsLockSet}),
+ %% Tell other node that we managed to get the lock.
+ Pid ! {lock, ?locker_vsn, IsLockSet, Known},
+ Pid ! {lock, IsLockSet, Known},
+ Pid ! {lock, IsLockSet},
+ %% Wait for other node's result.
+ receive
+ %% R7 patched and later
+ {lock, _LockerVsn, true, _} when IsLockSet == true ->
+ receive
+ {lock, _} ->
+ ok
+ end,
+ receive
+ {lock, _, _} ->
+ ok
+ end,
+ ?PRINT({node(), self(), locked}),
+ %% Now we got the lock in both partitions. Tell
+ %% global, and let him resolve name conflict.
+ ?P1({lock_sync, time(), node(), {Pid, node(Pid)}, self()}),
+ gen_server:cast(global_name_server, {lock_is_set, Node, MyTag}),
+ ?P1({lock_sync_done, time(), node(), {Pid, node(Pid)}, self()}),
+ %% Wait for global to tell us to remove lock.
+ receive
+ cancel ->
+ %% All conflicts are resolved, remove lock.
+ ?PRINT({node(), self(), locked1}),
+ del_lock(LockId, Known);
+ {'EXIT', Pid, _} ->
+ ?PRINT({node(), self(), locked2}),
+ %% Other node died; remove lock and ignore him.
+ del_lock(LockId, Known),
+ link(Global)
+ end;
+ {lock, _LockerVsn, _, HisKnown} ->
+ receive
+ {lock, _} ->
+ ok
+ end,
+ receive
+ {lock, _, _} ->
+ ok
+ end,
+ %% Some of us failed to get the lock; try again
+ ?PRINT({node(), self(), locked0}),
+ d_lock(IsLockSet, LockId, Known),
+ try_again_locker(Node, Pid, Try, MyTag, HisKnown, Global);
+ %% R7 unpatched
+ {lock, true, _} when IsLockSet == true ->
+ ?PRINT({node(), self(), locked}),
+ %% Now we got the lock in both partitions. Tell
+ %% global, and let him resolve name conflict.
+ gen_server:cast(global_name_server, {lock_is_set, Node, MyTag}),
+ %% Wait for global to tell us to remove lock.
+ receive
+ cancel ->
+ %% All conflicts are resolved, remove lock.
+ ?PRINT({node(), self(), locked1}),
+ del_lock(LockId, Known);
+ {'EXIT', Pid, _} ->
+ ?PRINT({node(), self(), locked2}),
+ %% Other node died; remove lock and ignore him.
+ del_lock(LockId, Known),
+ link(Global)
+ end;
+ {lock, _, HisKnown} ->
+ %% Some of us failed to get the lock; try again
+ ?PRINT({node(), self(), locked0}),
+ d_lock(IsLockSet, LockId, Known),
+ try_again_locker(Node, Pid, Try, MyTag, HisKnown, Global);
+ %% R6 and earlier
+ {lock, true} when IsLockSet == true ->
+ ?PRINT({node(), self(), locked}),
+ %% Now we got the lock in both partitions. Tell
+ %% global, and let him resolve name conflict.
+ gen_server:cast(global_name_server, {lock_is_set, Node, MyTag}),
+ %% Wait for global to tell us to remove lock.
+ receive
+ cancel ->
+ %% All conflicts are resolved, remove lock.
+ ?PRINT({node(), self(), locked1}),
+ del_lock(LockId, Known);
+ {'EXIT', Pid, _} ->
+ ?PRINT({node(), self(), locked2}),
+ %% Other node died; remove lock and ignore him.
+ del_lock(LockId, Known),
+ link(Global)
+ end;
+ {lock, _} ->
+ %% Some of us failed to get the lock; try again
+ ?PRINT({node(), self(), locked0}),
+ d_lock(IsLockSet, LockId, Known),
+ try_again_locker(Node, Pid, Try, MyTag, BothsKnown, Global);
+ {'EXIT', Pid, _} ->
+ %% Other node died; remove lock and ignore him.
+ ?PRINT({node(), self(), locked7}),
+ d_lock(IsLockSet, LockId, Known),
+ link(Global);
+ cancel ->
+ ?PRINT({node(), self(), locked8}),
+ d_lock(IsLockSet, LockId, Known)
+ end.
+
+d_lock(true, LockId, Known) -> del_lock(LockId, Known);
+d_lock(false, _, _) -> ok.
+
+try_again_locker(Node, Pid, Try, MyTag, HisKnown, Global) ->
+ ?PRINT({try_again, node(), self(), Node, Pid, Known, Try, MyTag}),
+ ?P1({try_again, time(), node(), self(), Node, Pid, Known, Try, MyTag}),
+ random_sleep(Try),
+ ?P1({try_again2, time(), node(), self(), Node, Pid, Known, Try, MyTag}),
+ NewKnown = gen_server:call(global_name_server, get_known),
+ case lists:member(Node, NewKnown) of
+ false ->
+ BothsKnown1 = HisKnown -- (HisKnown -- NewKnown),
+ ?PRINT({node(), self(), Node, again, notknown}),
+ ?PRINT({bothknown, BothsKnown, BothsKnown1}),
+ loop_locker(Node, Pid, NewKnown, Try+1, MyTag,
+ BothsKnown1, Global);
+ true ->
+ ?PRINT({node(), self(), Node, again, known}),
+ link(Global),
+ %% Node is already handled, we are ready.
+ ok
+ end.
+
+cancel_locker(Node, S) ->
+ %% multi
+ ?P2({cancel, node(), Node, get({sync_tag_my, Node})}),
+ S#state.the_locker ! {cancel, Node, get({sync_tag_my, Node})},
+
+ Lockers = S#state.lockers,
+ case lists:keysearch(Node, 1, Lockers) of
+ {value, {_, Pid}} ->
+ Pid ! cancel,
+ ?PRINT({cancel, Node, lockers, node(), Lockers}),
+ S#state{lockers = lists:keydelete(Node, 1, Lockers)};
+ _ ->
+ S
+ end.
+
+%% A node sent us his names. When a name clash is found, the resolve
+%% function is called from the smaller node => all resolve funcs are called
+%% from the same partition.
+exchange_names([{Name, Pid, Method} |Tail], Node, Ops, Res) ->
+ case ets:lookup(global_names, Name) of
+ [{Name, Pid, _}] ->
+ exchange_names(Tail, Node, Ops, Res);
+ [{Name, Pid2, Method2}] when node() < Node ->
+ %% Name clash! Add the result of resolving to Res(olved).
+ %% We know that node(Pid) /= node(), so we don't
+ %% need to link/unlink to Pid.
+ Node2 = node(Pid2), %%&&&&&& check external node???
+ case rpc:call(Node2, ?MODULE, resolve_it,
+ [Method2, Name, Pid, Pid2]) of
+ Pid ->
+ dounlink(Pid2),
+ ets:insert(global_names, {Name, Pid, Method}),
+ Op = {insert, {Name, Pid, Method}},
+ exchange_names(Tail, Node, [Op | Ops], [Op | Res]);
+ Pid2 ->
+ Op = {insert, {Name, Pid2, Method2}},
+ exchange_names(Tail, Node, Ops, [Op | Res]);
+ none ->
+ dounlink(Pid2),
+ ?P2({unregister, node(), Name, Pid2, node(Pid2)}),
+ ets:delete(global_names, Name),
+ Op = {delete, Name},
+ exchange_names(Tail, Node, [Op | Ops], [Op | Res]);
+ {badrpc, Badrpc} ->
+ error_logger:info_msg("global: badrpc ~w received when "
+ "conflicting name ~w was found",
+ [Badrpc, Name]),
+ dounlink(Pid2),
+ ets:insert(global_names, {Name, Pid, Method}),
+ Op = {insert, {Name, Pid, Method}},
+ exchange_names(Tail, Node, [Op | Ops], [Op | Res]);
+ Else ->
+ error_logger:info_msg("global: Resolve method ~w for "
+ "conflicting name ~w returned ~w~n",
+ [Method, Name, Else]),
+ dounlink(Pid2),
+ ets:delete(global_names, Name),
+ Op = {delete, Name},
+ exchange_names(Tail, Node, [Op | Ops], [Op | Res])
+ end;
+ [{Name, _Pid2, _}] ->
+ %% The other node will solve the conflict.
+ exchange_names(Tail, Node, Ops, Res);
+ _ ->
+ %% Entirely new name.
+ ets:insert(global_names, {Name, Pid, Method}),
+ exchange_names(Tail, Node,
+ [{insert, {Name, Pid, Method}} | Ops], Res)
+ end;
+exchange_names([], _, Ops, Res) ->
+ {Ops, Res}.
+
+resolve_it(Method, Name, Pid1, Pid2) ->
+ catch Method(Name, Pid1, Pid2).
+
+minmax(P1,P2) ->
+ if node(P1) < node(P2) -> {P1, P2}; true -> {P2, P1} end.
+
+random_exit_name(Name, Pid, Pid2) ->
+ {Min, Max} = minmax(Pid, Pid2),
+ error_logger:info_msg("global: Name conflict terminating ~w~n",
+ [{Name, Max}]),
+ exit(Max, kill),
+ Min.
+
+random_notify_name(Name, Pid, Pid2) ->
+ {Min, Max} = minmax(Pid, Pid2),
+ Max ! {global_name_conflict, Name},
+ Min.
+
+notify_all_name(Name, Pid, Pid2) ->
+ Pid ! {global_name_conflict, Name, Pid2},
+ Pid2 ! {global_name_conflict, Name, Pid},
+ none.
+
+cnode(Name, Pid, Pid2) ->
+ {Min, Max} = minmax(Pid, Pid2),
+ error_logger:info_msg("global: Name conflict terminating ~w~n",
+ [{Name, Max}]),
+ Max ! {global_name_conflict, Name},
+ Min.
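+
+%% Illustrative usage (hypothetical example, not part of this module):
+%% resolve methods such as random_exit_name/3, random_notify_name/3 and
+%% notify_all_name/3 above can be passed to register_name/3, e.g.
+%%
+%%   global:register_name(my_server, Pid, fun global:notify_all_name/3)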
+
+%% Only link to pids on our own node
+dolink(Pid) when node(Pid) == node() ->
+ link(Pid);
+dolink(_) -> ok.
+
+%% Only link to pids on our own node
+dolink_ext(Pid, RegNode) when RegNode == node() -> link(Pid);
+dolink_ext(_, _) -> ok.
+
+dounlink(Pid) when node(Pid) == node() ->
+ case ets:match(global_names, {'_', Pid, '_'}) of
+ [] ->
+ case is_pid_used(Pid) of
+ false ->
+ unlink(Pid);
+ true -> ok
+ end;
+ _ -> ok
+ end;
+dounlink(_Pid) ->
+ ok.
+
+is_pid_used(Pid) ->
+ is_pid_used(ets:tab2list(global_locks), Pid).
+
+is_pid_used([], _Pid) ->
+ false;
+is_pid_used([{_ResourceId, _LockReqId, Pids} | Tail], Pid) ->
+ case lists:member(Pid, Pids) of
+ true ->
+ true;
+ false ->
+ is_pid_used(Tail, Pid)
+ end.
+
+
+
+%% check_exit/2 removes the Pid from the affected tables.
+%% This function needs to abcast the deletion since only the local
+%% server is linked to the registered process (or the owner of the
+%% lock). All the other servers rely on the nodedown mechanism.
+check_exit(Deleter, Pid) ->
+ del_names(Deleter, Pid, ets:tab2list(global_names)),
+ del_locks(ets:tab2list(global_locks), Pid).
+
+del_names(Deleter, Pid, [{Name, Pid, _Method} | Tail]) ->
+ %% First, delete the Pid from the local ets; then send to other nodes
+ ets:delete(global_names, Name),
+ ets:delete(global_names_ext, Name),
+ dounlink(Pid),
+ Deleter ! {delete_name,self(),Name,Pid},
+ del_names(Deleter, Pid, Tail);
+del_names(Deleter, Pid, [_|T]) ->
+ del_names(Deleter, Pid, T);
+del_names(_Deleter, _Pid, []) -> done.
+
+del_locks([{ResourceId, LockReqId, Pids} | Tail], Pid) ->
+ case {lists:member(Pid, Pids), Pids} of
+ {true, [Pid]} ->
+ ets:delete(global_locks, ResourceId),
+ gen_server:abcast(nodes(), global_name_server,
+ {async_del_lock, ResourceId, Pid});
+ {true, _} ->
+ NewPids = lists:delete(Pid, Pids),
+ ets:insert(global_locks, {ResourceId, LockReqId, NewPids}),
+ gen_server:abcast(nodes(), global_name_server,
+ {async_del_lock, ResourceId, Pid});
+ _ ->
+ continue
+ end,
+ del_locks(Tail, Pid);
+del_locks([], _Pid) -> done.
+
+del_locks2([{ResourceId, LockReqId, Pids} | Tail], Pid) ->
+ case {lists:member(Pid, Pids), Pids} of
+ {true, [Pid]} ->
+ ets:delete(global_locks, ResourceId);
+ {true, _} ->
+ NewPids = lists:delete(Pid, Pids),
+ ets:insert(global_locks, {ResourceId, LockReqId, NewPids});
+ _ ->
+ continue
+ end,
+ del_locks2(Tail, Pid);
+del_locks2([], _Pid) ->
+ done.
+
+
+
+%% Unregister all Name/Pid pairs such that node(Pid) == Node
+%% and delete all locks where node(Pid) == Node
+do_node_down(Node) ->
+ do_node_down_names(Node, ets:tab2list(global_names)),
+ do_node_down_names_ext(Node, ets:tab2list(global_names_ext)),
+ do_node_down_locks(Node, ets:tab2list(global_locks)).
+
+do_node_down_names(Node, [{Name, Pid, _Method} | T]) when node(Pid) == Node ->
+ ets:delete(global_names, Name),
+ do_node_down_names(Node, T);
+do_node_down_names(Node, [_|T]) ->
+ do_node_down_names(Node, T);
+do_node_down_names(_, []) -> ok.
+
+%% Remove all external names registered on the crashed node.
+do_node_down_names_ext(Node, [{Name, _Pid, Node} | T]) ->
+ ets:delete(global_names, Name),
+ ets:delete(global_names_ext, Name),
+ do_node_down_names_ext(Node, T);
+do_node_down_names_ext(Node, [_|T]) ->
+ do_node_down_names_ext(Node, T);
+do_node_down_names_ext(_, []) -> ok.
+
+do_node_down_locks(Node, [{ResourceId, LockReqId, Pids} | T]) ->
+ case do_node_down_locks2(Pids, Node) of
+ [] ->
+ continue;
+ RemovePids ->
+ case Pids -- RemovePids of
+ [] ->
+ ets:delete(global_locks, ResourceId);
+ NewPids ->
+ ets:insert(global_locks, {ResourceId, LockReqId, NewPids})
+ end
+ end,
+ do_node_down_locks(Node, T);
+do_node_down_locks(Node, [_|T]) ->
+ do_node_down_locks(Node, T);
+do_node_down_locks(_, []) -> done.
+
+
+do_node_down_locks2(Pids, Node) ->
+ do_node_down_locks2(Pids, Node, []).
+
+do_node_down_locks2([], _Node, Res) ->
+ Res;
+do_node_down_locks2([Pid | Pids], Node, Res) when node(Pid) == Node ->
+ do_node_down_locks2(Pids, Node, [Pid | Res]);
+do_node_down_locks2([_ | Pids], Node, Res) ->
+ do_node_down_locks2(Pids, Node, Res).
+
+
+get_names() ->
+ ets:tab2list(global_names).
+
+get_names_ext() ->
+ ets:tab2list(global_names_ext).
+
+random_sleep(Times) ->
+ case (Times rem 10) of
+ 0 -> erase(random_seed);
+ _ -> ok
+ end,
+ case get(random_seed) of
+ undefined ->
+ {A1, A2, A3} = now(),
+ random:seed(A1, A2, A3 + erlang:phash(node(), 100000));
+ _ -> ok
+ end,
+    %% First time 1/4 second, then doubling each time up to 8 seconds max.
+ Tmax = if Times > 5 -> 8000;
+ true -> ((1 bsl Times) * 1000) div 8
+ end,
+ T = random:uniform(Tmax),
+ ?P({random_sleep, node(), self(), Times, T}),
+ receive after T -> ok end.
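+
+    %% Worked values for the backoff above (illustrative, not part of the
+    %% original code): Times = 1 gives Tmax = (1 bsl 1) * 1000 div 8 = 250 ms,
+    %% Times = 2 gives 500 ms, Times = 5 gives 4000 ms, and any Times > 5 is
+    %% capped at Tmax = 8000 ms; the actual sleep is random:uniform(Tmax) ms.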
+
+dec(infinity) -> infinity;
+dec(N) -> N-1.
+
+send_again(Msg) ->
+ spawn_link(?MODULE, timer, [self(), Msg]).
+
+timer(Pid, Msg) ->
+ random_sleep(5),
+ Pid ! Msg.
+
+change_our_node_name(NewNode, S) ->
+ S#state{node_name = NewNode}.
+
+
+%%-----------------------------------------------------------------
+%% Each sync process corresponds to one call to sync. Each such
+%% process asks the global_name_server on all Nodes if it is in sync
+%% with Nodes. If not, that (other) node spawns a syncer process that
+%% waits for global to get in sync with all Nodes. When it is in
+%% sync, the syncer process tells the original sync process about it.
+%%-----------------------------------------------------------------
+start_sync(Nodes, From) ->
+ spawn_link(?MODULE, sync_init, [Nodes, From]).
+
+sync_init(Nodes, From) ->
+ lists:foreach(fun(Node) -> monitor_node(Node, true) end, Nodes),
+ sync_loop(Nodes, From).
+
+sync_loop([], From) ->
+ gen_server:reply(From, ok);
+sync_loop(Nodes, From) ->
+ receive
+ {nodedown, Node} ->
+ monitor_node(Node, false),
+ sync_loop(lists:delete(Node, Nodes), From);
+ {synced, SNodes} ->
+ lists:foreach(fun(N) -> monitor_node(N, false) end, SNodes),
+ sync_loop(Nodes -- SNodes, From)
+ end.
+
+
+%%%====================================================================================
+%%% Get the current global_groups definition
+%%%====================================================================================
+check_sync_nodes() ->
+ case get_own_nodes() of
+ {ok, all} ->
+ nodes();
+ {ok, NodesNG} ->
+            %% The global_groups parameter is defined; we are not allowed to sync
+            %% with nodes not in our own global group.
+ (nodes() -- (nodes() -- NodesNG));
+ {error, Error} ->
+ {error, Error}
+ end.
+
+check_sync_nodes(SyncNodes) ->
+ case get_own_nodes() of
+ {ok, all} ->
+ SyncNodes;
+ {ok, NodesNG} ->
+            %% The global_groups parameter is defined; we are not allowed to sync
+            %% with nodes not in our own global group.
+ OwnNodeGroup = (nodes() -- (nodes() -- NodesNG)),
+ IllegalSyncNodes = (SyncNodes -- [node() | OwnNodeGroup]),
+ case IllegalSyncNodes of
+ [] -> SyncNodes;
+ _ -> {error, {"Trying to sync nodes not defined in the own global group",
+ IllegalSyncNodes}}
+ end;
+ {error, Error} ->
+ {error, Error}
+ end.
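+
+%% Illustrative note (not from the original source): the expression
+%% nodes() -- (nodes() -- NodesNG) used above computes the intersection
+%% of the currently connected nodes and our own global group; e.g. with
+%% nodes() = [a, b, c] and NodesNG = [b, c, d] it evaluates to [b, c].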
+
+get_own_nodes() ->
+ case global_group:get_own_nodes_with_errors() of
+ {error, Error} ->
+ {error, {"global_groups definition error", Error}};
+ OkTup ->
+ OkTup
+ end.
+
+
+%%-----------------------------------------------------------------
+%% The deleter process is a satellite process to global_name_server
+%% that does background batch deleting of names when a process
+%% that had globally registered names dies. It is started by and
+%% linked to global_name_server.
+%%-----------------------------------------------------------------
+
+start_the_deleter(Global) ->
+ spawn_link(
+ fun () ->
+ loop_the_deleter(Global)
+ end).
+
+loop_the_deleter(Global) ->
+ Deletions = collect_deletions(Global, []),
+ trans({global, self()},
+ fun() ->
+ lists:map(
+ fun ({Name,Pid}) ->
+ ?P2({delete_name2, Name, Pid, nodes()}),
+ gen_server:abcast(nodes(), global_name_server,
+ {async_del_name, Name, Pid})
+ end, Deletions)
+ end,
+ nodes()),
+ loop_the_deleter(Global).
+
+collect_deletions(Global, Deletions) ->
+ receive
+ {delete_name,Global,Name,Pid} ->
+ ?P2({delete_name, node(), self(), Name, Pid, nodes()}),
+ collect_deletions(Global, [{Name,Pid}|Deletions]);
+ Other ->
+ error_logger:error_msg("The global_name_server deleter process "
+ "received an unexpected message:\n~p\n",
+ [Other]),
+ collect_deletions(Global, Deletions)
+ after case Deletions of
+ [] -> infinity;
+ _ -> 0
+ end ->
+ lists:reverse(Deletions)
+ end.
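+
+%% Illustrative timeline (not part of the original code): while its
+%% message queue is empty, collect_deletions/2 blocks with an infinite
+%% timeout; once the first {delete_name, ...} arrives it keeps draining
+%% with timeout 0, so a burst of exits is batched into one trans/3
+%% transaction whose fun abcasts all collected deletions at once.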
diff --git a/lib/dialyzer/test/plt_tests_SUITE.erl b/lib/dialyzer/test/plt_tests_SUITE.erl
new file mode 100644
index 0000000000..bf45020340
--- /dev/null
+++ b/lib/dialyzer/test/plt_tests_SUITE.erl
@@ -0,0 +1,21 @@
+%% This suite is the only hand-made one; it simply
+%% checks whether we can build a PLT.
+
+-module(plt_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, all/0, build_plt/1]).
+
+suite() ->
+ [{timetrap, ?plt_timeout}].
+
+all() -> [build_plt].
+
+build_plt(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ ok -> ok;
+ fail -> ct:fail(plt_build_fail)
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE.erl b/lib/dialyzer/test/r9c_tests_SUITE.erl
new file mode 100644
index 0000000000..cd5bd5ec61
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE.erl
@@ -0,0 +1,64 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(r9c_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([r9c_tests_SUITE_consistency/1, asn1/1, inets/1, mnesia/1]).
+
+suite() ->
+ [{timetrap, {minutes, 20}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, [{defines,[{vsn,42}]}]}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [r9c_tests_SUITE_consistency,asn1,inets,mnesia].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+r9c_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+asn1(Config) ->
+ case dialyze(Config, asn1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+inets(Config) ->
+ case dialyze(Config, inets) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia(Config) ->
+ case dialyze(Config, mnesia) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/r9c_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..e00e23bb66
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/dialyzer_options
@@ -0,0 +1,2 @@
+{dialyzer_options, [{defines, [{vsn, 42}]}]}.
+{time_limit, 20}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/results/asn1 b/lib/dialyzer/test/r9c_tests_SUITE_data/results/asn1
new file mode 100644
index 0000000000..ac83366bc8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/results/asn1
@@ -0,0 +1,106 @@
+
+asn1ct.erl:1500: The variable Err can never match since previous clauses completely covered the type #type{}
+asn1ct.erl:1596: The variable _ can never match since previous clauses completely covered the type 'ber_bin_v2'
+asn1ct.erl:1673: The pattern 'all' can never match the type 'asn1_module' | 'exclusive_decode' | 'partial_decode'
+asn1ct.erl:672: The pattern <{'false', Result}, _, _> can never match the type <{'true','true'},atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()],[any()]>
+asn1ct.erl:909: Guard test is_atom(Ext::[49 | 97 | 98 | 100 | 110 | 115]) can never succeed
+asn1ct_check.erl:1698: The pattern {'error', _} can never match the type [any()]
+asn1ct_check.erl:2733: The pattern {'type', Tag, _, _, _, _} can never match the type 'ASN1_OPEN_TYPE' | {_,_} | {'fixedtypevaluefield',_,_}
+asn1ct_check.erl:2738: The pattern <_S, _> can never match since previous clauses completely covered the type <#state{},#ObjectClassFieldType{class::#objectclass{fields::maybe_improper_list() | {_,_,_,_}},fieldname::{_,maybe_improper_list()},type::'ASN1_OPEN_TYPE' | {_,_} | {'fixedtypevaluefield',_,_}}>
+asn1ct_check.erl:2887: The variable Other can never match since previous clauses completely covered the type any()
+asn1ct_check.erl:3188: The pattern <_S, [], B> can never match the type <#state{},{'SingleValue',_},{'ValueRange',_}>
+asn1ct_check.erl:3190: The pattern <_S, A, []> can never match the type <#state{},{'SingleValue',_},{'ValueRange',_}>
+asn1ct_check.erl:3212: The pattern {[], C3} can never match the type {[any(),...],{'ValueRange',{'MIN','MAX'}}}
+asn1ct_check.erl:3225: The pattern {L1, UbNew} can never match the type 'false'
+asn1ct_check.erl:3228: The pattern {L1, LbNew} can never match the type 'false'
+asn1ct_check.erl:3235: The call asn1ct_check:remove_val_from_list(number(),L::[any(),...]) will never return since it differs in the 1st argument from the success typing arguments: ([any()],any())
+asn1ct_check.erl:3240: The call asn1ct_check:remove_val_from_list(number(),L::[any(),...]) will never return since it differs in the 1st argument from the success typing arguments: ([any()],any())
+asn1ct_check.erl:3242: Function remove_val_from_list/2 has no local return
+asn1ct_check.erl:3243: The call lists:member(Val::[any(),...],List::number()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),[any()])
+asn1ct_check.erl:3283: The pattern [] can never match the type [any(),...]
+asn1ct_check.erl:3362: The pattern <_, [], _VR> can never match the type <#state{},[any(),...],[any(),...]>
+asn1ct_check.erl:3364: The pattern <_, _SV, []> can never match the type <#state{},[any(),...],[any(),...]>
+asn1ct_check.erl:4150: The pattern <_, [_]> can never match the type <_,[]>
+asn1ct_check.erl:4314: The pattern <S, Type, {Rlist, ExtList}> can never match the type <#state{},_,[any()]>
+asn1ct_check.erl:4360: The pattern <S, Type, {Rlist, ExtList}> can never match the type <#state{},_,[any()]>
+asn1ct_check.erl:4719: The call asn1ct_check:error({'type',{'asn1',[1..255,...],[any(),...]}}) will never return since it differs in the 1st argument from the success typing arguments: ({'ObjectSet' | 'class' | 'export' | 'ptype' | 'type' | 'value',_,#state{}})
+asn1ct_check.erl:5120: Guard test is_record(Type::{_,_} | {'fixedtypevaluefield',_,_},'type',6) can never succeed
+asn1ct_check.erl:5128: Guard test is_record(Type::{_,_} | {'fixedtypevaluefield',_,_},'type',6) can never succeed
+asn1ct_check.erl:540: The pattern <_S, {'poc', _ObjSet, _Params}> can never match since previous clauses completely covered the type <#state{},_>
+asn1ct_check.erl:5517: The pattern <_, []> can never match the type <_,[{'ABSTRACT-SYNTAX',{_,_,_}} | {'TYPE-IDENTIFIER',{_,_,_}},...]>
+asn1ct_constructed_ber.erl:1075: The pattern {{{'ObjectClassFieldType', _, _, _, {'objectfield', PrimFieldName1, PFNList}}, _}, {'componentrelation', _, _}} can never match the type {#type{},_}
+asn1ct_constructed_ber.erl:695: The pattern {'EXTENSIONMARK', _, _} can never match the type #ComponentType{}
+asn1ct_constructed_ber.erl:748: The pattern <Erules, TopType, {CompList, _ExtList}> can never match the type <_,maybe_improper_list(),[#ComponentType{typespec::{_,_,_,_,_,_}}]>
+asn1ct_constructed_ber_bin_v2.erl:914: The pattern {{{'ObjectClassFieldType', _, _, _, {'objectfield', PrimFieldName1, PFNList}}, _}, {'componentrelation', _, _}} can never match the type {#type{},_}
+asn1ct_gen.erl:740: The pattern [] can never match the type [any(),...]
+asn1ct_gen_ber.erl:974: The pattern <Erules, [{Name, Def} | Rest]> can never match the type <_,[#typedef{name::atom(),typespec::{_,_,_,_,_,_}}]>
+asn1ct_gen_ber_bin_v2.erl:975: The pattern <Erules, [{Name, Def} | Rest]> can never match the type <_,[#typedef{name::atom(),typespec::{_,_,_,_,_,_}}]>
+asn1ct_gen_per.erl:646: The pattern <Erules, [{Name, Def} | Rest]> can never match the type <_,[#typedef{name::atom()}]>
+asn1ct_gen_per_rt2ct.erl:1189: The pattern <Erules, [{Name, Def} | Rest]> can never match the type <_,[#typedef{name::atom()}]>
+asn1ct_gen_per_rt2ct.erl:563: The pattern <C, ['EXT_MARK' | T], _Count> can never match the type <[{'ValueRange',{_,_}},...],[char() | {'asn1_enum',integer()},...],non_neg_integer()>
+asn1ct_gen_per_rt2ct.erl:580: The pattern <_C, 'EXT_MARK', _Count> can never match the type <[{'ValueRange',{_,_}},...],char(),non_neg_integer()>
+asn1ct_gen_per_rt2ct.erl:583: The pattern <_C, {1, EnumName}, Count> can never match the type <[{'ValueRange',{_,_}},...],char(),non_neg_integer()>
+asn1ct_gen_per_rt2ct.erl:587: The pattern <C, {0, EnumName}, Count> can never match the type <[{'ValueRange',{_,_}},...],char(),non_neg_integer()>
+asn1ct_gen_per_rt2ct.erl:656: The pattern <Type, C> can never match since previous clauses completely covered the type <'bitstring' | 'integer',_>
+asn1ct_parser2.erl:2017: Call to missing or unexported function ordsets:list_to_set/1
+asn1ct_parser2.erl:2497: The variable _ can never match since previous clauses completely covered the type 'ok'
+asn1ct_parser2.erl:2628: The pattern {Rlist, ExtList} can never match the type [{_,_,_},...]
+asn1ct_parser2.erl:2660: Call to missing or unexported function ordsets:list_to_set/1
+asn1ct_parser2.erl:2685: Call to missing or unexported function ordsets:list_to_set/1
+asn1ct_parser2.erl:281: The variable Other can never match since previous clauses completely covered the type [any()]
+asn1ct_parser2.erl:529: The variable _ can never match since previous clauses completely covered the type #constraint{}
+asn1ct_parser2.erl:555: The variable _ can never match since previous clauses completely covered the type #constraint{}
+asn1ct_parser2.erl:796: The variable _ can never match since previous clauses completely covered the type {_,_}
+asn1ct_parser2.erl:814: The variable _ can never match since previous clauses completely covered the type {_,_}
+asn1ct_parser2.erl:831: The variable _ can never match since previous clauses completely covered the type {_,_}
+asn1ct_value.erl:247: The pattern <'undefined', Default> can never match the type <maybe_improper_list(),[1..255,...]>
+asn1rt_ber_bin.erl:1125: Cons will produce an improper list since its 2nd argument is binary() | tuple()
+asn1rt_ber_bin.erl:1276: The pattern <{{_Min1, Max1}, {Min2, Max2}}, BitListVal, _DoTag> can never match since previous clauses completely covered the type <{_,_},maybe_improper_list(),_>
+asn1rt_ber_bin.erl:2057: The call asn1rt_ber_bin:check_if_valid_tag2('false',[],[],OptOrMand::any()) will never return since it differs in the 2nd argument from the success typing arguments: ('false' | {'APPLICATION',_} | {'CONTEXT',_} | {'PRIVATE',_} | {'UNIVERSAL',_},nonempty_maybe_improper_list(),[] | {_,_,_},any())
+asn1rt_ber_bin.erl:969: The pattern {Val01, Buffer01, Rb01} can never match the type {'MINUS-INFINITY' | 'PLUS-INFINITY' | 0,binary()}
+asn1rt_ber_bin.erl:998: The pattern {FirstLen, {Exp, Buffer3}, RemBytes2} can never match the type {1..1114111,{integer(),binary(),number()},number()}
+asn1rt_ber_bin_v2.erl:1230: The pattern <{{_Min1, Max1}, {Min2, Max2}}, BitListVal, TagIn> can never match since previous clauses completely covered the type <{_,_},maybe_improper_list(),_>
+asn1rt_ber_bin_v2.erl:328: The variable _ can never match since previous clauses completely covered the type {{0 | 1,non_neg_integer(),'indefinite' | non_neg_integer(),binary()},binary() | []}
+asn1rt_ber_bin_v2.erl:337: The variable _ can never match since previous clauses completely covered the type {{0 | 1,non_neg_integer(),'indefinite' | non_neg_integer(),binary()},binary() | []}
+asn1rt_ber_bin_v2.erl:392: The variable _ can never match since previous clauses completely covered the type {{0 | 1,non_neg_integer(),'indefinite' | non_neg_integer(),binary()},binary() | []}
+asn1rt_ber_bin_v2.erl:963: Function decode_real/3 has no local return
+asn1rt_check.erl:100: The variable _ can never match since previous clauses completely covered the type [any()]
+asn1rt_check.erl:85: The variable _ can never match since previous clauses completely covered the type [any()]
+asn1rt_driver_handler.erl:32: The pattern 'already_done' can never match the type {'error',_}
+asn1rt_per.erl:1065: The pattern {'BMPString', {'octets', Ol}} can never match the type {_,[{'bits',1 | 2 | 4 | 8 | 16 | 32,_}]}
+asn1rt_per.erl:1066: Function will never be called
+asn1rt_per.erl:1231: The call erlang:'not'('implemented') will never return since it differs in the 1st argument from the success typing arguments: (boolean())
+asn1rt_per.erl:1233: The call erlang:'not'('implemented') will never return since it differs in the 1st argument from the success typing arguments: (boolean())
+asn1rt_per.erl:1235: The call erlang:'not'('implemented') will never return since it differs in the 1st argument from the success typing arguments: (boolean())
+asn1rt_per.erl:1237: The call erlang:'not'('implemented') will never return since it differs in the 1st argument from the success typing arguments: (boolean())
+asn1rt_per.erl:989: The pattern <_C, 'true', _Val> can never match the type <_,'false',_>
+asn1rt_per_bin.erl:1361: The pattern <_, 'true', _> can never match the type <_,'false',_>
+asn1rt_per_bin.erl:1436: The pattern {'BMPString', {'octets', Ol}} can never match the type {'BMPString' | 'IA5String' | 'NumericString' | 'PrintableString' | 'UniversalString' | 'VisibleString',[{'bits',1 | 2 | 4 | 8 | 16 | 32,_}]}
+asn1rt_per_bin.erl:1437: Function will never be called
+asn1rt_per_bin.erl:161: The call asn1rt_per_bin:getbit({0,maybe_improper_list()}) will never return since it differs in the 1st argument from the success typing arguments: (<<_:8,_:_*8>> | {non_neg_integer(),<<_:1,_:_*1>>})
+asn1rt_per_bin.erl:1812: The pattern {Name, Val} can never match since previous clauses completely covered the type any()
+asn1rt_per_bin.erl:2106: Cons will produce an improper list since its 2nd argument is binary()
+asn1rt_per_bin.erl:2111: Cons will produce an improper list since its 2nd argument is binary()
+asn1rt_per_bin.erl:2111: Cons will produce an improper list since its 2nd argument is integer()
+asn1rt_per_bin.erl:2117: Cons will produce an improper list since its 2nd argument is integer()
+asn1rt_per_bin.erl:2121: Cons will produce an improper list since its 2nd argument is 0
+asn1rt_per_bin.erl:2123: Cons will produce an improper list since its 2nd argument is 0
+asn1rt_per_bin.erl:2127: Cons will produce an improper list since its 2nd argument is 0
+asn1rt_per_bin.erl:2129: Cons will produce an improper list since its 2nd argument is integer()
+asn1rt_per_bin.erl:446: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin.erl:467: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin.erl:474: The pattern <{_N, <<_:8/integer-unit:1,Bs/binary-unit:8>>}, C> can never match since previous clauses completely covered the type <{0,_},integer()>
+asn1rt_per_bin.erl:487: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin.erl:498: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin_rt2ct.erl:152: The call asn1rt_per_bin_rt2ct:getbit({0,maybe_improper_list()}) will never return since it differs in the 1st argument from the success typing arguments: (<<_:8,_:_*8>> | {non_neg_integer(),<<_:1,_:_*1>>})
+asn1rt_per_bin_rt2ct.erl:1533: The pattern {'BMPString', {'octets', Ol}} can never match the type {_,[[any(),...]]}
+asn1rt_per_bin_rt2ct.erl:1534: Function will never be called
+asn1rt_per_bin_rt2ct.erl:1875: The pattern {Name, Val} can never match since previous clauses completely covered the type any()
+asn1rt_per_bin_rt2ct.erl:443: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin_rt2ct.erl:464: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin_rt2ct.erl:471: The pattern <{_N, <<_B:8/integer-unit:1,Bs/binary-unit:8>>}, C> can never match since previous clauses completely covered the type <{0,_},integer()>
+asn1rt_per_bin_rt2ct.erl:484: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_bin_rt2ct.erl:495: The variable _ can never match since previous clauses completely covered the type integer()
+asn1rt_per_v1.erl:1209: The pattern <_, 'true', _> can never match the type <_,'false',_>
+asn1rt_per_v1.erl:1290: The pattern {'BMPString', {'octets', Ol}} can never match the type {'BMPString' | 'IA5String' | 'NumericString' | 'PrintableString' | 'UniversalString' | 'VisibleString',[{'bits',1 | 2 | 4 | 8 | 16 | 32,_}]}
+asn1rt_per_v1.erl:1291: Function will never be called
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/results/inets b/lib/dialyzer/test/r9c_tests_SUITE_data/results/inets
new file mode 100644
index 0000000000..fd5e36a3cd
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/results/inets
@@ -0,0 +1,59 @@
+
+ftp.erl:1243: The pattern {'ok', {N, Bytes}} can never match the type 'eof' | {'error',atom()} | {'ok',binary() | string()}
+ftp.erl:640: The pattern {'closed', _Why} can never match the type 'perm_fname_not_allowed' | 'perm_neg_compl' | 'perm_no_space' | 'pos_compl' | 'pos_interm' | 'pos_interm_acct' | 'trans_neg_compl' | 'trans_no_space' | {'error' | 'perm_fname_not_allowed' | 'perm_neg_compl' | 'perm_no_space' | 'pos_compl' | 'pos_interm' | 'pos_interm_acct' | 'pos_prel' | 'trans_neg_compl' | 'trans_no_space',atom() | [any()] | {'invalid_server_response',[any(),...]}}
+http.erl:117: The pattern {'error', Reason} can never match the type #req_headers{connection::[45 | 97 | 101 | 105 | 107 | 108 | 112 | 118,...],content_length::[48,...],other::[{_,_}]}
+http.erl:138: Function close_session/2 will never be called
+http_lib.erl:286: The call http_lib:close('ip_comm' | {'ssl',_},any()) will never return since it differs in the 1st argument from the success typing arguments: ('http' | 'https',any())
+http_lib.erl:424: The variable _ can never match since previous clauses completely covered the type any()
+http_lib.erl:438: The variable _ can never match since previous clauses completely covered the type any()
+http_lib.erl:99: Function getHeaderValue/2 will never be called
+httpc_handler.erl:322: Function status_continue/2 has no local return
+httpc_handler.erl:37: Function init_connection/2 has no local return
+httpc_handler.erl:65: Function next_response_with_request/2 has no local return
+httpc_handler.erl:660: Function exit_session_ok/2 has no local return
+httpc_manager.erl:145: The pattern {ErrorReply, State2} can never match the type {{'ok',number()},number(),#state{reqid::number()}}
+httpc_manager.erl:160: The pattern {ErrorReply, State2} can never match the type {{'ok',number()},number(),#state{reqid::number()}}
+httpc_manager.erl:478: The pattern {'error', Reason} can never match the type 'ok' | {number(),#session{clientclose::boolean(),pipeline::[],quelength::1}}
+httpc_manager.erl:490: The pattern {'error', Reason} can never match the type 'ok' | {number(),#session{clientclose::boolean(),pipeline::[],quelength::1}}
+httpd.erl:583: The pattern <{'error', Reason}, _Fd, SoFar> can never match the type <[any()],pid(),[[any(),...]]>
+httpd_acceptor.erl:105: The pattern {'error', Reason} can never match the type {'ok',pid()}
+httpd_acceptor.erl:110: Function handle_connection_err/4 will never be called
+httpd_acceptor.erl:168: Function report_error/2 will never be called
+httpd_acceptor.erl:91: The call httpd_acceptor:handle_error({'EXIT',_},ConfigDb::any(),SocketType::any()) will never return since it differs in the 1st argument from the success typing arguments: ('econnaborted' | 'emfile' | 'esslaccept' | 'timeout' | {'enfile',_},any(),any())
+httpd_manager.erl:885: The pattern {'EXIT', Reason} can never match since previous clauses completely covered the type any()
+httpd_manager.erl:919: Function auth_status/1 will never be called
+httpd_manager.erl:926: Function sec_status/1 will never be called
+httpd_manager.erl:933: Function acceptor_status/1 will never be called
+httpd_request_handler.erl:374: The call httpd_response:send_status(Info::#mod{parsed_header::maybe_improper_list()},417,[32 | 66 | 98 | 100 | 103 | 105 | 111 | 116 | 121,...]) will never return since it differs in the 2nd argument from the success typing arguments: (#mod{socket_type::'ip_comm' | {'ssl',_}},100 | 301 | 304 | 400 | 401 | 403 | 404 | 412 | 414 | 416 | 500 | 501 | 503,any())
+httpd_request_handler.erl:378: The call httpd_response:send_status(Info::#mod{parsed_header::maybe_improper_list()},417,[32 | 77 | 97 | 100 | 101 | 104 | 108 | 110 | 111 | 116 | 119,...]) will never return since it differs in the 2nd argument from the success typing arguments: (#mod{socket_type::'ip_comm' | {'ssl',_}},100 | 301 | 304 | 400 | 401 | 403 | 404 | 412 | 414 | 416 | 500 | 501 | 503,any())
+httpd_request_handler.erl:401: The call httpd_response:send_status(Info::#mod{parsed_header::maybe_improper_list()},417,[32 | 77 | 97 | 100 | 101 | 104 | 108 | 110 | 111 | 116 | 119,...]) will never return since it differs in the 2nd argument from the success typing arguments: (#mod{socket_type::'ip_comm' | {'ssl',_}},100 | 301 | 304 | 400 | 401 | 403 | 404 | 412 | 414 | 416 | 500 | 501 | 503,any())
+httpd_request_handler.erl:644: The call lists:reverse(Fields0::{'error',_} | {'ok',[[any()]]}) will never return since it differs in the 1st argument from the success typing arguments: ([any()])
+httpd_request_handler.erl:645: Function will never be called
+httpd_sup.erl:63: The variable Else can never match since previous clauses completely covered the type {'error',_} | {'ok',[any()],_,_}
+httpd_sup.erl:88: The pattern {'error', Reason} can never match the type {'ok',_,_}
+httpd_sup.erl:92: The variable Else can never match since previous clauses completely covered the type {'ok',_,_}
+mod_auth.erl:559: The pattern {'error', Reason} can never match the type {_,integer(),maybe_improper_list(),_}
+mod_auth_dets.erl:120: The call lists:foreach(fun((_) -> 'true' | {'error','no_such_group' | 'no_such_group_member'}),{'ok',[any()]}) will never return since it differs in the 2nd argument from the success typing arguments: (fun((_) -> any()),[any()])
+mod_auth_plain.erl:100: The variable _ can never match since previous clauses completely covered the type {'ok',[any()]}
+mod_auth_plain.erl:159: The variable _ can never match since previous clauses completely covered the type [any()]
+mod_auth_plain.erl:83: The variable O can never match since previous clauses completely covered the type [any()]
+mod_cgi.erl:372: The pattern {'http_response', NewAccResponse} can never match the type 'ok'
+mod_dir.erl:101: The call lists:flatten(nonempty_improper_list(atom() | binary() | [any()] | char(),atom())) will never return since it differs in the 1st argument from the success typing arguments: ([any()])
+mod_dir.erl:72: The pattern {'error', Reason} can never match the type {'ok',[[[any()] | char()],...]}
+mod_get.erl:135: The pattern <{'enfile', _}, _Info, Path> can never match the type <atom(),#mod{},atom() | binary() | [atom() | binary() | [any()] | char()]>
+mod_head.erl:80: The pattern <{'enfile', _}, _Info, Path> can never match the type <atom(),#mod{},atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]>
+mod_htaccess.erl:460: The pattern {'error', BadData} can never match the type {'ok',_}
+mod_include.erl:193: The pattern {_, Name, {[], []}} can never match the type {[any()],[any()],maybe_improper_list()}
+mod_include.erl:195: The pattern {_, Name, {PathInfo, []}} can never match the type {[any()],[any()],maybe_improper_list()}
+mod_include.erl:197: The pattern {_, Name, {PathInfo, QueryString}} can never match the type {[any()],[any()],maybe_improper_list()}
+mod_include.erl:201: The variable Gurka can never match since previous clauses completely covered the type {[any()],[any()],maybe_improper_list()}
+mod_include.erl:692: The pattern <{'read', Reason}, Info, Path> can never match the type <{'open',atom()},#mod{},atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]>
+mod_include.erl:706: The pattern <{'enfile', _}, _Info, Path> can never match the type <atom(),#mod{},atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]>
+mod_include.erl:716: Function read_error/3 will never be called
+mod_include.erl:719: Function read_error/4 will never be called
+mod_security_server.erl:386: The variable O can never match since previous clauses completely covered the type [any()]
+mod_security_server.erl:433: The variable Other can never match since previous clauses completely covered the type [any()]
+mod_security_server.erl:585: The variable _ can never match since previous clauses completely covered the type [any()]
+mod_security_server.erl:608: The variable _ can never match since previous clauses completely covered the type [any()]
+mod_security_server.erl:641: The variable _ can never match since previous clauses completely covered the type [any()]
+uri.erl:146: The pattern {'error', Error} can never match since previous clauses completely covered the type {_,{[],[]}}
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/results/mnesia b/lib/dialyzer/test/r9c_tests_SUITE_data/results/mnesia
new file mode 100644
index 0000000000..e199581a0e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/results/mnesia
@@ -0,0 +1,34 @@
+
+mnesia.erl:1319: Guard test size(Spec::[{_,_,_},...]) can never succeed
+mnesia.erl:1498: The call mnesia:bad_info_reply(Tab::atom(),Item::'type') will never return since it differs in the 2nd argument from the success typing arguments: (atom(),'memory' | 'size')
+mnesia.erl:331: Function mod2abs/1 has no local return
+mnesia_bup.erl:111: The created fun has no local return
+mnesia_bup.erl:574: Function fallback_receiver/2 has no local return
+mnesia_bup.erl:967: Function uninstall_fallback_master/2 has no local return
+mnesia_checkpoint.erl:1014: The variable Error can never match since previous clauses completely covered the type {'ok',#checkpoint_args{nodes::[any()],retainers::[any(),...]}}
+mnesia_controller.erl:1666: The variable Tab can never match since previous clauses completely covered the type [any()]
+mnesia_controller.erl:1679: The pattern {'stop', Reason, Reply, State2} can never match the type {'noreply',_} | {'reply',_,_} | {'stop','shutdown',#state{}}
+mnesia_controller.erl:1685: The pattern {'noreply', State2, _Timeout} can never match the type {'reply',_,_}
+mnesia_event.erl:77: The pattern 'remove_handler' can never match the type {'ok',_}
+mnesia_event.erl:79: The pattern {'swap_handler', Args1, State1, Mod2, Args2} can never match the type {'ok',_}
+mnesia_frag.erl:294: The call mnesia_frag:remote_collect(Ref::reference(),{'error',_},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
+mnesia_frag.erl:304: The call mnesia_frag:remote_collect(Ref::reference(),{'error',{'node_not_running',_}},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
+mnesia_frag.erl:312: The call mnesia_frag:remote_collect(Ref::reference(),LocalRes::{'error',_},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
+mnesia_index.erl:52: The call mnesia_lib:other_val(Var::{_,'commit_work' | 'index' | 'setorbag' | 'storage_type' | {'index',_}},_ReASoN_::any()) will never return since it differs in the 1st argument from the success typing arguments: ({_,'active_replicas' | 'where_to_read' | 'where_to_write'},any())
+mnesia_lib.erl:957: The pattern {'ok', {0, _}} can never match the type 'eof' | {'error',atom()} | {'ok',binary() | string()}
+mnesia_lib.erl:959: The pattern {'ok', {_, Bin}} can never match the type 'eof' | {'error',atom()} | {'ok',binary() | string()}
+mnesia_loader.erl:36: The call mnesia_lib:other_val(Var::{_,'access_mode' | 'cstruct' | 'db_nodes' | 'setorbag' | 'snmp' | 'storage_type'},Reason::any()) will never return since it differs in the 1st argument from the success typing arguments: ({_,'active_replicas' | 'where_to_read' | 'where_to_write'},any())
+mnesia_locker.erl:1017: Function system_terminate/4 has no local return
+mnesia_log.erl:707: The test {'error',{[1..255,...],[any(),...]}} | {'ok',_} == atom() can never evaluate to 'true'
+mnesia_log.erl:727: The created fun has no local return
+mnesia_monitor.erl:162: The pattern <[], []> can never match the type <[any(),...],[any(),...]>
+mnesia_monitor.erl:354: The pattern {'error', Reason} can never match the type 'ok'
+mnesia_recover.erl:159: The call mnesia_lib:other_val(Var::'latest_transient_decision' | 'max_wait_for_decision' | 'previous_transient_decisions' | 'recover_nodes',Reason::any()) will never return since it differs in the 1st argument from the success typing arguments: ({_,'active_replicas' | 'where_to_read' | 'where_to_write'},any())
+mnesia_recover.erl:884: The pattern {'stop', Reason, Reply, State2} can never match the type {'noreply',_} | {'stop','shutdown',#state{}}
+mnesia_schema.erl:1088: Guard test Storage::'disc_copies' | 'disc_only_copies' | 'ram_copies' == 'unknown' can never succeed
+mnesia_schema.erl:1258: Guard test FromS::'disc_copies' | 'disc_only_copies' | 'ram_copies' == 'unknown' can never succeed
+mnesia_schema.erl:1639: The pattern {'false', 'mandatory'} can never match the type {'false','optional'}
+mnesia_schema.erl:2434: The variable Reason can never match since previous clauses completely covered the type {'error',_} | {'ok',_}
+mnesia_schema.erl:451: Guard test UseDirAnyway::'false' == 'true' can never succeed
+mnesia_tm.erl:1522: Function commit_participant/5 has no local return
+mnesia_tm.erl:2169: Function system_terminate/4 has no local return
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Makefile b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Makefile
new file mode 100644
index 0000000000..b539e88108
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Makefile
@@ -0,0 +1,151 @@
+#
+# Copyright (C) 1997, Ericsson Telecommunications
+# Author: Kenneth Lundin
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+VSN=$(ASN1_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/asn1-$(VSN)
+
+
+
+
+#
+# Common Macros
+#
+# PARSER_SRC = \
+# asn1ct_parser.yrl
+
+# PARSER_MODULE=$(PARSER_SRC:%.yrl=%)
+
+EBIN = ../ebin
+CT_MODULES= \
+ asn1ct \
+ asn1ct_check \
+ asn1_db \
+ asn1ct_pretty_format \
+ asn1ct_gen \
+ asn1ct_gen_per \
+ asn1ct_gen_per_rt2ct \
+ asn1ct_name \
+ asn1ct_constructed_per \
+ asn1ct_constructed_ber \
+ asn1ct_gen_ber \
+ asn1ct_constructed_ber_bin_v2 \
+ asn1ct_gen_ber_bin_v2 \
+ asn1ct_value \
+ asn1ct_tok \
+ asn1ct_parser2
+
+RT_MODULES= \
+ asn1rt \
+ asn1rt_per \
+ asn1rt_per_bin \
+ asn1rt_per_v1 \
+ asn1rt_ber_bin \
+ asn1rt_ber_bin_v2 \
+ asn1rt_per_bin_rt2ct \
+ asn1rt_driver_handler \
+ asn1rt_check
+
+# asn1rt_ber_v1 \
+# asn1rt_ber \
+# the rt module to use is defined in asn1_records.hrl
+# and must be updated when an incompatible change is done in the rt modules
+
+
+MODULES= $(CT_MODULES) $(RT_MODULES)
+
+ERL_FILES = $(MODULES:%=%.erl)
+
+TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+GENERATED_PARSER = $(PARSER_MODULE:%=%.erl)
+
+# internal hrl file
+HRL_FILES = asn1_records.hrl
+
+APP_FILE = asn1.app
+APPUP_FILE = asn1.appup
+
+APP_SRC = $(APP_FILE).src
+APP_TARGET = $(EBIN)/$(APP_FILE)
+
+APPUP_SRC = $(APPUP_FILE).src
+APPUP_TARGET = $(EBIN)/$(APPUP_FILE)
+
+EXAMPLES = \
+ ../examples/P-Record.asn
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_FLAGS +=
+ERL_COMPILE_FLAGS += \
+ -I$(ERL_TOP)/lib/stdlib \
+ +warn_unused_vars
+YRL_FLAGS =
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt: $(TARGET_FILES) $(APP_TARGET) $(APPUP_TARGET)
+
+
+clean:
+ rm -f $(TARGET_FILES) $(APP_TARGET) $(APPUP_TARGET) $(GENERATED_PARSER)
+ rm -f core *~
+
+docs:
+
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(EBIN)/asn1ct.$(EMULATOR):asn1ct.erl
+ $(ERLC) -b$(EMULATOR) -o$(EBIN) $(ERL_COMPILE_FLAGS) -Dvsn=\"$(VSN)\" $<
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(APP_TARGET) $(APPUP_TARGET) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(PARSER_SRC) $(ERL_FILES) $(HRL_FILES) $(APP_SRC) $(APPUP_SRC) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/examples
+ $(INSTALL_DATA) $(EXAMPLES) $(RELSYSDIR)/examples
+
+# there are no include files to be used by the user
+#$(INSTALL_DIR) $(RELSYSDIR)/include
+#$(INSTALL_DATA) $(HRL_FILES) $(RELSYSDIR)/include
+
+release_docs_spec:
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Restrictions.txt b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Restrictions.txt
new file mode 100644
index 0000000000..73b725245d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/Restrictions.txt
@@ -0,0 +1,55 @@
+The following restrictions apply to this implementation of the ASN.1 compiler:
+
+Supported encoding rules are:
+BER
+PER (aligned)
+
+PER (unaligned) IS NOT SUPPORTED
+
+Supported types are:
+
+INTEGER
+BOOLEAN
+ENUMERATION
+SEQUENCE
+SEQUENCE OF
+SET
+SET OF
+CHOICE
+OBJECT IDENTIFIER
+RestrictedCharacterStringTypes
+UnrestrictedCharacterStringTypes
+
+
+NOT SUPPORTED types are:
+ANY (IS NOT IN THE STANDARD ANY MORE)
+ANY DEFINED BY (IS NOT IN THE STANDARD ANY MORE)
+EXTERNAL
+EMBEDDED-PDV
+REAL
+
+The support for value definitions in the ASN.1 notation is very limited.
+
+The support for constraints is limited to:
+SizeConstraint SIZE(X)
+SingleValue (1)
+ValueRange (X..Y)
+PermittedAlphabet FROM
+
+The only supported value notation for SEQUENCE and SET in Erlang is
+the record variant.
+The list notation with named components used by the old ASN.1 compiler
+was supported in the first versions of this compiler but is no longer
+supported.
+
+The decode functions always return a symbolic value if they can.
+
+
+Files with ASN.1 source must have the suffix .asn1. The suffix .py, used by the
+old ASN.1 compiler, is supported in this version but will not be supported in the future.
+
+Generated files:
+X.asn1db % the intermediate format of a compiled ASN.1 module
+X.hrl % generated Erlang include file for module X
+X.erl % generated Erlang module with encode/decode functions for
+ % ASN.1 module X
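A hedged illustration of the workflow described above, assuming a successful compilation; the source file name and the option list are placeholders:

    %% "MyModule.asn1" is a placeholder file name.
    ok = asn1ct:compile("MyModule.asn1", [ber]).
    %% On success this produces MyModule.asn1db, MyModule.hrl and
    %% MyModule.erl (the latter holding the encode/decode functions).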
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.app.src b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.app.src
new file mode 100644
index 0000000000..2ec06ff4db
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.app.src
@@ -0,0 +1,20 @@
+{application, asn1,
+ [{description, "The Erlang ASN1 compiler version %VSN%"},
+ {vsn, "%VSN%"},
+ {modules, [
+ asn1rt,
+ asn1rt_per,
+ asn1rt_per_v1,
+ asn1rt_per_bin,
+ asn1rt_per_bin_rt2ct,
+ asn1rt_ber_bin,
+ asn1rt_ber_bin_v2,
+ asn1rt_check,
+ asn1rt_driver_handler
+ ]},
+ {registered, [
+ asn1_driver_owner
+ ]},
+ {env, []},
+ {applications, [kernel, stdlib]}
+ ]}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.appup.src b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.appup.src
new file mode 100644
index 0000000000..255dec709e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1.appup.src
@@ -0,0 +1,166 @@
+{"%VSN%",
+ [
+ {"1.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin},
+ {add_module, asn1rt_check},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin},
+ {add_module, asn1rt_check},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin},
+ {add_module, asn1rt_check},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.2",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ }
+ ],
+ [
+ {"1.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin, soft_purge, soft_purge}},
+ {remove, {asn1rt_check, soft_purge, soft_purge}},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin, soft_purge, soft_purge}},
+ {remove, {asn1rt_check, soft_purge, soft_purge}},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin, soft_purge, soft_purge}},
+ {remove, {asn1rt_check, soft_purge, soft_purge}},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.2",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ }
+
+ ]}.
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_db.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_db.erl
new file mode 100644
index 0000000000..cf01e39fed
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_db.erl
@@ -0,0 +1,162 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1_db.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1_db).
+%-compile(export_all).
+-export([dbnew/1,dbsave/2,dbload/1,dbput/3,dbget/2,dbget_all/1]).
+-export([dbget_all_mod/1,dbstop/0,dbclear/0,dberase_module/1,dbstart/1,stop_server/1]).
+%% internal exports
+-export([dbloop0/1,dbloop/2]).
+
+%% Db stuff
+dbstart(Includes) ->
+ start_server(asn1db, asn1_db, dbloop0, [Includes]).
+
+dbloop0(Includes) ->
+ dbloop(Includes, ets:new(asn1, [set,named_table])).
+
+opentab(Tab,Mod,[]) ->
+ opentab(Tab,Mod,["."]);
+opentab(Tab,Mod,Includes) ->
+ Base = lists:concat([Mod,".asn1db"]),
+ opentab2(Tab,Base,Mod,Includes,ok).
+
+opentab2(_Tab,_Base,_Mod,[],Error) ->
+ Error;
+opentab2(Tab,Base,Mod,[Ih|It],_Error) ->
+ File = filename:join(Ih,Base),
+ case ets:file2tab(File) of
+ {ok,Modtab} ->
+ ets:insert(Tab,{Mod, Modtab}),
+ {ok,Modtab};
+ NewErr ->
+ opentab2(Tab,Base,Mod,It,NewErr)
+ end.
+
+
+dbloop(Includes, Tab) ->
+ receive
+ {From,{set, Mod, K2, V}} ->
+ [{_,Modtab}] = ets:lookup(Tab,Mod),
+ ets:insert(Modtab,{K2, V}),
+ From ! {asn1db, ok},
+ dbloop(Includes, Tab);
+ {From, {get, Mod, K2}} ->
+ Result = case ets:lookup(Tab,Mod) of
+ [] ->
+ opentab(Tab,Mod,Includes);
+ [{_,Modtab}] -> {ok,Modtab}
+ end,
+ case Result of
+ {ok,Newtab} ->
+ From ! {asn1db, lookup(Newtab, K2)};
+ _Error ->
+ From ! {asn1db, undefined}
+ end,
+ dbloop(Includes, Tab);
+ {From, {all_mod, Mod}} ->
+ [{_,Modtab}] = ets:lookup(Tab,Mod),
+ From ! {asn1db, ets:tab2list(Modtab)},
+ dbloop(Includes, Tab);
+ {From, {delete_mod, Mod}} ->
+ [{_,Modtab}] = ets:lookup(Tab,Mod),
+ ets:delete(Modtab),
+ ets:delete(Tab,Mod),
+ From ! {asn1db, ok},
+ dbloop(Includes, Tab);
+ {From, {save, OutFile,Mod}} ->
+ [{_,Mtab}] = ets:lookup(Tab,Mod),
+ From ! {asn1db, ets:tab2file(Mtab,OutFile)},
+ dbloop(Includes,Tab);
+ {From, {load, Mod}} ->
+ Result = case ets:lookup(Tab,Mod) of
+ [] ->
+ opentab(Tab,Mod,Includes);
+ [{_,Modtab}] -> {ok,Modtab}
+ end,
+ From ! {asn1db, Result},
+ dbloop(Includes,Tab);
+ {From, {new, Mod}} ->
+ case ets:lookup(Tab,Mod) of
+ [{_,Modtab}] ->
+ ets:delete(Modtab);
+ _ ->
+ true
+ end,
+ Tabname = list_to_atom(lists:concat(["asn1_",Mod])),
+ ets:new(Tabname, [set,named_table]),
+ ets:insert(Tab,{Mod,Tabname}),
+ From ! {asn1db, ok},
+ dbloop(Includes,Tab);
+ {From, stop} ->
+ From ! {asn1db, ok}; %% nothing to store
+ {From, clear} ->
+ ModTabList = [Mt||{_,Mt} <- ets:tab2list(Tab)],
+ lists:foreach(fun(T) -> ets:delete(T) end,ModTabList),
+ ets:delete(Tab),
+ From ! {asn1db, cleared},
+ dbloop(Includes, ets:new(asn1, [set]))
+ end.
+
+
+%%all(Tab, K) ->
+%% pickup(K, ets:match(Tab, {{K, '$1'}, '$2'})).
+%%pickup(K, []) -> [];
+%%pickup(K, [[V1,V2] |T]) ->
+%% [{{K,V1},V2} | pickup(K, T)].
+
+lookup(Tab, K) ->
+ case ets:lookup(Tab, K) of
+ [] -> undefined;
+ [{K,V}] -> V
+ end.
+
+
+dbnew(Module) -> req({new,Module}).
+dbsave(OutFile,Module) -> req({save,OutFile,Module}).
+dbload(Module) -> req({load,Module}).
+
+dbput(Module,K,V) -> req({set, Module, K, V}).
+dbget(Module,K) -> req({get, Module, K}).
+dbget_all(K) -> req({get_all, K}).
+dbget_all_mod(Mod) -> req({all_mod,Mod}).
+dbstop() -> stop_server(asn1db).
+dbclear() -> req(clear).
+dberase_module({module,M})->
+ req({delete_mod, M}).
+
+req(R) ->
+ asn1db ! {self(), R},
+ receive {asn1db, Reply} -> Reply end.
+
+stop_server(Name) ->
+ stop_server(Name, whereis(Name)).
+stop_server(_, undefined) -> stopped;
+stop_server(Name, _Pid) ->
+ Name ! {self(), stop},
+ receive {Name, _} -> stopped end.
+
+
+start_server(Name,Mod,Fun,Args) ->
+ case whereis(Name) of
+ undefined ->
+ register(Name, spawn(Mod,Fun, Args));
+ _Pid ->
+ already_started
+ end.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_records.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_records.hrl
new file mode 100644
index 0000000000..07ca8cccf3
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1_records.hrl
@@ -0,0 +1,96 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1_records.hrl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-define('RT_BER',"asn1rt_ber_v1").
+-define('RT_BER_BIN',"asn1rt_ber_bin").
+-define('RT_PER',"asn1rt_per_v1").
+%% change to this when we have this module -define('RT_PER_BIN',"asn1rt_per_bin").
+-define('RT_PER_BIN',"asn1rt_per_bin").
+
+-record(module,{pos,name,defid,tagdefault='EXPLICIT',exports={exports,[]},imports={imports,[]}, extensiondefault=empty,typeorval}).
+
+-record('SEQUENCE',{pname=false,tablecinf=false,components=[]}).
+-record('SET',{pname=false,sorted=false,tablecinf=false,components=[]}).
+-record('ComponentType',{pos,name,typespec,prop,tags}).
+-record('ObjectClassFieldType',{classname,class,fieldname,type}).
+
+-record(typedef,{checked=false,pos,name,typespec}).
+-record(classdef,{checked=false,pos,name,typespec}).
+-record(valuedef,{checked=false,pos,name,type,value}).
+-record(ptypedef,{checked=false,pos,name,args,typespec}).
+-record(pvaluedef,{checked=false,pos,name,args,type,value}).
+-record(pvaluesetdef,{checked=false,pos,name,args,type,valueset}).
+-record(pobjectdef,{checked=false,pos,name,args,class,def}).
+-record(pobjectsetdef,{checked=false,pos,name,args,class,def}).
+
+-record(typereference,{pos,val}).
+-record(identifier,{pos,val}).
+-record(constraint,{c,e}).
+-record('Constraint',{'SingleValue'=no,'SizeConstraint'=no,'ValueRange'=no,'PermittedAlphabet'=no,
+ 'ContainedSubtype'=no, 'TypeConstraint'=no,'InnerSubtyping'=no,e=no,'Other'=no}).
+-record(simpletableattributes,{objectsetname,c_name,c_index,usedclassfield,
+ uniqueclassfield,valueindex}).
+-record(type,{tag=[],def,constraint=[],tablecinf=[],inlined=no}).
+
+-record(objectclass,{fields=[],syntax}).
+-record('Object',{classname,gen=true,def}).
+-record('ObjectSet',{class,gen=true,uniquefname,set}).
+
+-record(tag,{class,number,type,form=32}). % form = ?CONSTRUCTED
+% This record holds information about allowed constraint types per type
+-record(cmap,{single_value=no,contained_subtype=no,value_range=no,
+ size=no,permitted_alphabet=no,type_constraint=no,
+ inner_subtyping=no}).
+
+
+-record('EXTENSIONMARK',{pos,val}).
+
+% each IMPORT contains a list of 'SymbolsFromModule'
+-record('SymbolsFromModule',{symbols,module,objid}).
+
+% Externaltypereference -> modulename '.' typename
+-record('Externaltypereference',{pos,module,type}).
+% Externalvaluereference -> modulename '.' valuename
+-record('Externalvaluereference',{pos,module,value}).
+
+-record(state,{module,mname,type,tname,value,vname,erule,parameters=[],
+ inputmodules,abscomppath=[],recordtopname=[],options}).
+
+%% state record used by backend at partial decode
+%% active is set to 'yes' when a partial decode function is generated.
+%% prefix is set to 'dec-inc-' or 'dec-partial-' for
+%% incomplete partial decode or partial decode, respectively.
+%% inc_tag_pattern holds the tags of the significant types/components
+%% for incomplete partial decode.
+%% tag_pattern holds the tags for partial decode.
+%% inc_type_pattern and type_pattern holds the names of the
+%% significant types/components.
+%% func_name holds the name of the function for the toptype.
+%% namelist holds the list of names of types/components that still
+%% haven't been generated.
+%% tobe_refed_funcs is a list of tuples {function names
+%% (Types),namelist of incomplete decode spec}, with function names
+%% that are referenced within other generated partial incomplete
+%% decode functions. They shall be generated as partial incomplete
+%% decode functions.
+
+%% gen_refed_funcs is a list of function names. Unlike
+%% tobe_refed_funcs these have been generated.
+-record(gen_state,{active=false,prefix,inc_tag_pattern,
+ tag_pattern,inc_type_pattern,
+ type_pattern,func_name,namelist,
+ tobe_refed_funcs=[],gen_refed_funcs=[]}).
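An illustrative sketch of how a backend might create and update the #gen_state{} record documented above; the module name, function names and values are invented for the example:

    -module(gen_state_sketch).
    -include("asn1_records.hrl").
    -export([new/2, add_generated/2]).

    %% Start tracking a partial-decode generation pass.
    new(Prefix, NameList) ->
        #gen_state{prefix = Prefix, namelist = NameList}.

    %% Note that a referenced function has now been generated.
    add_generated(FuncName, #gen_state{gen_refed_funcs = Done} = GS) ->
        GS#gen_state{gen_refed_funcs = [FuncName | Done]}.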
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct.erl
new file mode 100644
index 0000000000..37189e3780
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct.erl
@@ -0,0 +1,1904 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct).
+
+%% Compile-time functions for ASN.1 (e.g. the ASN.1 compiler).
+
+%%-compile(export_all).
+%% Public exports
+-export([compile/1, compile/2]).
+-export([start/0, start/1, stop/0]).
+-export([encode/2, encode/3, decode/3]).
+-export([test/1, test/2, test/3, value/2]).
+%% Application internal exports
+-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,value/1,vsn/0,
+ create_ets_table/2,get_name_of_def/1,get_pos_of_def/1]).
+-export([read_config_data/1,get_gen_state_field/1,get_gen_state/0,
+ partial_inc_dec_toptype/1,save_gen_state/1,update_gen_state/2,
+ get_tobe_refed_func/1,reset_gen_state/0,is_function_generated/1,
+ generated_refed_func/1,next_refed_func/0,pop_namelist/0,
+ next_namelist_el/0,update_namelist/1,step_in_constructed/0,
+ add_tobe_refed_func/1,add_generated_refed_func/1]).
+
+-include("asn1_records.hrl").
+-include_lib("stdlib/include/erl_compile.hrl").
+
+-import(asn1ct_gen_ber_bin_v2,[encode_tag_val/3,decode_class/1]).
+
+-define(unique_names,0).
+-define(dupl_uniquedefs,1).
+-define(dupl_equaldefs,2).
+-define(dupl_eqdefs_uniquedefs,?dupl_equaldefs bor ?dupl_uniquedefs).
+
+-define(CONSTRUCTED, 2#00100000).
+
+%% macros used for partial decode commands
+-define(CHOOSEN,choosen).
+-define(SKIP,skip).
+-define(SKIP_OPTIONAL,skip_optional).
+
+%% macros used for partial incomplete decode commands
+-define(MANDATORY,mandatory).
+-define(DEFAULT,default).
+-define(OPTIONAL,opt).
+-define(PARTS,parts).
+-define(UNDECODED,undec).
+-define(ALTERNATIVE,alt).
+-define(ALTERNATIVE_UNDECODED,alt_undec).
+-define(ALTERNATIVE_PARTS,alt_parts).
+%-define(BINARY,bin).
+
+%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This is the interface to the compiler
+%%
+%%
+
+
+compile(File) ->
+ compile(File,[]).
+
+compile(File,Options) when list(Options) ->
+ Options1 =
+ case {lists:member(optimize,Options),lists:member(ber_bin,Options)} of
+ {true,true} ->
+ [ber_bin_v2|Options--[ber_bin]];
+ _ -> Options
+ end,
+ case (catch input_file_type(File)) of
+ {single_file,PrefixedFile} ->
+ (catch compile1(PrefixedFile,Options1));
+ {multiple_files_file,SetBase,FileName} ->
+ FileList = get_file_list(FileName),
+ (catch compile_set(SetBase,filename:dirname(FileName),
+ FileList,Options1));
+ Err = {input_file_error,_Reason} ->
+ {error,Err}
+ end.
+
+
+compile1(File,Options) when list(Options) ->
+ io:format("Erlang ASN.1 version ~p compiling ~p ~n",[?vsn,File]),
+ io:format("Compiler Options: ~p~n",[Options]),
+ Ext = filename:extension(File),
+ Base = filename:basename(File,Ext),
+ OutFile = outfile(Base,"",Options),
+ DbFile = outfile(Base,"asn1db",Options),
+ Includes = [I || {i,I} <- Options],
+ EncodingRule = get_rule(Options),
+ create_ets_table(asn1_functab,[named_table]),
+ Continue1 = scan({true,true},File,Options),
+ Continue2 = parse(Continue1,File,Options),
+ Continue3 = check(Continue2,File,OutFile,Includes,EncodingRule,
+ DbFile,Options,[]),
+ Continue4 = generate(Continue3,OutFile,EncodingRule,Options),
+ delete_tables([asn1_functab]),
+ compile_erl(Continue4,OutFile,Options).
+
+%%****************************************************************************%%
+%% functions dealing with compiling of several input files to one output file %%
+%%****************************************************************************%%
+compile_set(SetBase,DirName,Files,Options) when list(hd(Files)),list(Options) ->
+ %% case when there are several input files in a list
+ io:format("Erlang ASN.1 version ~p compiling ~p ~n",[?vsn,Files]),
+ io:format("Compiler Options: ~p~n",[Options]),
+ OutFile = outfile(SetBase,"",Options),
+ DbFile = outfile(SetBase,"asn1db",Options),
+ Includes = [I || {i,I} <- Options],
+ EncodingRule = get_rule(Options),
+ create_ets_table(asn1_functab,[named_table]),
+ ScanRes = scan_set(DirName,Files,Options),
+ ParseRes = parse_set(ScanRes,Options),
+ Result =
+ case [X||X <- ParseRes,element(1,X)==true] of
+ [] -> %% all were false, time to quit
+ lists:map(fun(X)->element(2,X) end,ParseRes);
+ ParseRes -> %% all were true, continue with check
+ InputModules =
+ lists:map(
+ fun(F)->
+ E = filename:extension(F),
+ B = filename:basename(F,E),
+ if
+ list(B) -> list_to_atom(B);
+ true -> B
+ end
+ end,
+ Files),
+ check_set(ParseRes,SetBase,OutFile,Includes,
+ EncodingRule,DbFile,Options,InputModules);
+ Other ->
+ {error,{'unexpected error in scan/parse phase',
+ lists:map(fun(X)->element(3,X) end,Other)}}
+ end,
+ delete_tables([asn1_functab]),
+ Result.
+
+check_set(ParseRes,SetBase,OutFile,Includes,EncRule,DbFile,
+ Options,InputModules) ->
+ lists:foreach(fun({_T,M,File})->
+ cmp(M#module.name,File)
+ end,
+ ParseRes),
+ MergedModule = merge_modules(ParseRes,SetBase),
+ SetM = MergedModule#module{name=SetBase},
+ Continue1 = check({true,SetM},SetBase,OutFile,Includes,EncRule,DbFile,
+ Options,InputModules),
+ Continue2 = generate(Continue1,OutFile,EncRule,Options),
+
+ delete_tables([renamed_defs,original_imports,automatic_tags]),
+
+ compile_erl(Continue2,OutFile,Options).
+
+%% merge_modules/2 -> returns a module record where the typeorval lists are merged,
+%% the exports lists are merged, the imports lists are merged when the
+%% elements come from other modules than the merge set, the tagdefault
+%% field gets the shared value if all modules have same tagging scheme,
+%% otherwise a tagging_error exception is thrown,
+%% the extensiondefault ...(not handled yet).
+merge_modules(ParseRes,CommonName) ->
+ ModuleList = lists:map(fun(X)->element(2,X) end,ParseRes),
+ NewModuleList = remove_name_collisions(ModuleList),
+ case ets:info(renamed_defs,size) of
+ 0 -> ets:delete(renamed_defs);
+ _ -> ok
+ end,
+ save_imports(NewModuleList),
+% io:format("~p~n~p~n~p~n~n",[ets:lookup(original_imports,'M1'),ets:lookup(original_imports,'M2'),ets:tab2list(original_imports)]),
+ TypeOrVal = lists:append(lists:map(fun(X)->X#module.typeorval end,
+ NewModuleList)),
+ InputMNameList = lists:map(fun(X)->X#module.name end,
+ NewModuleList),
+ CExports = common_exports(NewModuleList),
+
+ ImportsModuleNameList = lists:map(fun(X)->
+ {X#module.imports,
+ X#module.name} end,
+ NewModuleList),
+ %% ImportsModuleNameList: [{Imports,ModuleName},...]
+ %% Imports is a tuple {imports,[#'SymbolsFromModule'{},...]}
+ CImports = common_imports(ImportsModuleNameList,InputMNameList),
+ TagDefault = check_tagdefault(NewModuleList),
+ #module{name=CommonName,tagdefault=TagDefault,exports=CExports,
+ imports=CImports,typeorval=TypeOrVal}.
+
+%% causes an exit if duplicate definition names exist in a module
+remove_name_collisions(Modules) ->
+ create_ets_table(renamed_defs,[named_table]),
+ %% Name duplicates in the same module is not allowed.
+ lists:foreach(fun exit_if_nameduplicate/1,Modules),
+ %% Then remove duplicates in different modules and return the
+ %% new list of modules.
+ remove_name_collisions2(Modules,[]).
+
+%% For each definition in the first module in module list, find
+%% all definitions with the same name and rename both definitions in
+%% the first module and in the rest of the modules.
+remove_name_collisions2([M|Ms],Acc) ->
+ TypeOrVal = M#module.typeorval,
+ MName = M#module.name,
+ %% Test each name in TypeOrVal on all modules in Ms
+ {NewM,NewMs} = remove_name_collisions2(MName,TypeOrVal,Ms,[]),
+ remove_name_collisions2(NewMs,[M#module{typeorval=NewM}|Acc]);
+remove_name_collisions2([],Acc) ->
+ finished_warn_prints(),
+ Acc.
+
+%% For each definition in list of defs find definitions in (rest of)
+%% modules that have same name. If duplicate was found rename def.
+%% Test each name in [T|Ts] on all modules in Ms
+remove_name_collisions2(ModName,[T|Ts],Ms,Acc) ->
+ Name = get_name_of_def(T),
+ case discover_dupl_in_mods(Name,T,Ms,[],?unique_names) of
+ {_,?unique_names} -> % there was no name collision
+ remove_name_collisions2(ModName,Ts,Ms,[T|Acc]);
+ {NewMs,?dupl_uniquedefs} -> % renamed defs in NewMs
+ %% rename T
+ NewT = set_name_of_def(ModName,Name,T), %rename def
+ warn_renamed_def(ModName,get_name_of_def(NewT),Name),
+ ets:insert(renamed_defs,{get_name_of_def(NewT),Name,ModName}),
+ remove_name_collisions2(ModName,Ts,NewMs,[NewT|Acc]);
+ {NewMs,?dupl_equaldefs} -> % name duplicates, but identical defs
+ %% keep name of T
+ warn_kept_def(ModName,Name),
+ remove_name_collisions2(ModName,Ts,NewMs,[T|Acc]);
+ {NewMs,?dupl_eqdefs_uniquedefs} ->
+ %% keep name of T, renamed defs in NewMs
+ warn_kept_def(ModName,Name),
+ remove_name_collisions2(ModName,Ts,NewMs,[T|Acc])
+ end;
+remove_name_collisions2(_,[],Ms,Acc) ->
+ {Acc,Ms}.
+
+%% Name is the name of a definition. If a definition with the same name
+%% is found in the modules Ms the definition will be renamed and returned.
+discover_dupl_in_mods(Name,Def,[M=#module{name=N,typeorval=TorV}|Ms],
+ Acc,AnyRenamed) ->
+ Fun = fun(T,RenamedOrDupl)->
+ case {get_name_of_def(T),compare_defs(Def,T)} of
+ {Name,not_equal} ->
+ %% rename def
+ NewT=set_name_of_def(N,Name,T),
+ warn_renamed_def(N,get_name_of_def(NewT),Name),
+ ets:insert(renamed_defs,{get_name_of_def(NewT),
+ Name,N}),
+ {NewT,?dupl_uniquedefs bor RenamedOrDupl};
+ {Name,equal} ->
+ %% delete def
+ warn_deleted_def(N,Name),
+ {[],?dupl_equaldefs bor RenamedOrDupl};
+ _ ->
+ {T,RenamedOrDupl}
+ end
+ end,
+ {NewTorV,NewAnyRenamed} = lists:mapfoldl(Fun,AnyRenamed,TorV),
+ %% have to flatten the NewTorV to remove any empty list elements
+ discover_dupl_in_mods(Name,Def,Ms,
+ [M#module{typeorval=lists:flatten(NewTorV)}|Acc],
+ NewAnyRenamed);
+discover_dupl_in_mods(_,_,[],Acc,AnyRenamed) ->
+ {Acc,AnyRenamed}.
+
+warn_renamed_def(ModName,NewName,OldName) ->
+ maybe_first_warn_print(),
+ io:format("NOTICE: The ASN.1 definition in module ~p with name ~p has been renamed in generated module. New name is ~p.~n",[ModName,OldName,NewName]).
+
+warn_deleted_def(ModName,DefName) ->
+ maybe_first_warn_print(),
+ io:format("NOTICE: The ASN.1 definition in module ~p with name ~p has been deleted in generated module.~n",[ModName,DefName]).
+
+warn_kept_def(ModName,DefName) ->
+ maybe_first_warn_print(),
+ io:format("NOTICE: The ASN.1 definition in module ~p with name ~p has kept its name due to equal definition as duplicate.~n",[ModName,DefName]).
+
+maybe_first_warn_print() ->
+ case get(warn_duplicate_defs) of
+ undefined ->
+ put(warn_duplicate_defs,true),
+ io:format("~nDue to multiple occurrences of a definition name in "
+ "multi-file compiled files:~n");
+ _ ->
+ ok
+ end.
+finished_warn_prints() ->
+ put(warn_duplicate_defs,undefined).
+
+
+exit_if_nameduplicate(#module{typeorval=TorV}) ->
+ exit_if_nameduplicate(TorV);
+exit_if_nameduplicate([]) ->
+ ok;
+exit_if_nameduplicate([Def|Rest]) ->
+ Name=get_name_of_def(Def),
+ exit_if_nameduplicate2(Name,Rest),
+ exit_if_nameduplicate(Rest).
+
+exit_if_nameduplicate2(Name,Rest) ->
+ Pred=fun(Def)->
+ case get_name_of_def(Def) of
+ Name -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(Pred,Rest) of
+ true ->
+ throw({error,{"more than one definition with same name",Name}});
+ _ ->
+ ok
+ end.
+
+compare_defs(D1,D2) ->
+ compare_defs2(unset_pos(D1),unset_pos(D2)).
+compare_defs2(D,D) ->
+ equal;
+compare_defs2(_,_) ->
+ not_equal.
+
+unset_pos(Def) when record(Def,typedef) ->
+ Def#typedef{pos=undefined};
+unset_pos(Def) when record(Def,classdef) ->
+ Def#classdef{pos=undefined};
+unset_pos(Def) when record(Def,valuedef) ->
+ Def#valuedef{pos=undefined};
+unset_pos(Def) when record(Def,ptypedef) ->
+ Def#ptypedef{pos=undefined};
+unset_pos(Def) when record(Def,pvaluedef) ->
+ Def#pvaluedef{pos=undefined};
+unset_pos(Def) when record(Def,pvaluesetdef) ->
+ Def#pvaluesetdef{pos=undefined};
+unset_pos(Def) when record(Def,pobjectdef) ->
+ Def#pobjectdef{pos=undefined};
+unset_pos(Def) when record(Def,pobjectsetdef) ->
+ Def#pobjectsetdef{pos=undefined}.
+
+get_pos_of_def(#typedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#classdef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#valuedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#ptypedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pvaluedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pvaluesetdef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pobjectdef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pobjectsetdef{pos=Pos}) ->
+ Pos.
+
+
+get_name_of_def(#typedef{name=Name}) ->
+ Name;
+get_name_of_def(#classdef{name=Name}) ->
+ Name;
+get_name_of_def(#valuedef{name=Name}) ->
+ Name;
+get_name_of_def(#ptypedef{name=Name}) ->
+ Name;
+get_name_of_def(#pvaluedef{name=Name}) ->
+ Name;
+get_name_of_def(#pvaluesetdef{name=Name}) ->
+ Name;
+get_name_of_def(#pobjectdef{name=Name}) ->
+ Name;
+get_name_of_def(#pobjectsetdef{name=Name}) ->
+ Name.
+
+set_name_of_def(ModName,Name,OldDef) ->
+ NewName = list_to_atom(lists:concat([Name,ModName])),
+ case OldDef of
+ #typedef{} -> OldDef#typedef{name=NewName};
+ #classdef{} -> OldDef#classdef{name=NewName};
+ #valuedef{} -> OldDef#valuedef{name=NewName};
+ #ptypedef{} -> OldDef#ptypedef{name=NewName};
+ #pvaluedef{} -> OldDef#pvaluedef{name=NewName};
+ #pvaluesetdef{} -> OldDef#pvaluesetdef{name=NewName};
+ #pobjectdef{} -> OldDef#pobjectdef{name=NewName};
+ #pobjectsetdef{} -> OldDef#pobjectsetdef{name=NewName}
+ end.
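+
+%% Illustrative example (hypothetical names): if both 'M1' and 'M2' define a
+%% type 'T' and the two definitions differ, each copy is renamed by appending
+%% its module name ('T' in 'M1' becomes 'TM1', 'T' in 'M2' becomes 'TM2', as
+%% in set_name_of_def/3 above), and a {NewName,OldName,ModName} entry is
+%% inserted into the renamed_defs ets table for each rename.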
+
+save_imports(ModuleList)->
+ Fun = fun(M) ->
+ case M#module.imports of
+ {_,[]} -> [];
+ {_,I} ->
+ {M#module.name,I}
+ end
+ end,
+ ImportsList = lists:map(Fun,ModuleList),
+ case lists:flatten(ImportsList) of
+ [] ->
+ ok;
+ ImportsList2 ->
+ create_ets_table(original_imports,[named_table]),
+ ets:insert(original_imports,ImportsList2)
+ end.
+
+
+common_exports(ModuleList) ->
+    %% if all modules export 'all' then export 'all',
+    %% otherwise export each typeorval name
+ case lists:filter(fun(X)->
+ element(2,X#module.exports) /= all
+ end,
+ ModuleList) of
+ []->
+ {exports,all};
+ ModsWithExpList ->
+ CExports1 =
+ lists:append(lists:map(fun(X)->element(2,X#module.exports) end,
+ ModsWithExpList)),
+ CExports2 = export_all(lists:subtract(ModuleList,ModsWithExpList)),
+ {exports,CExports1++CExports2}
+ end.
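+
+%% Illustrative example (hypothetical names): if 'M1' exports all and 'M2'
+%% exports only ['T2'], common_exports/1 returns {exports,['T2'|Refs]} where
+%% Refs contains one external type/value reference for every definition in
+%% 'M1', as built by export_all/1 below.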
+
+export_all([])->[];
+export_all(ModuleList) ->
+ ExpList =
+ lists:map(
+ fun(M)->
+ TorVL=M#module.typeorval,
+ MName = M#module.name,
+ lists:map(
+ fun(Def)->
+ case Def of
+ T when record(T,typedef)->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=T#typedef.name};
+ V when record(V,valuedef) ->
+ #'Externalvaluereference'{pos=0,
+ module=MName,
+ value=V#valuedef.name};
+ C when record(C,classdef) ->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=C#classdef.name};
+ P when record(P,ptypedef) ->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=P#ptypedef.name};
+ PV when record(PV,pvaluesetdef) ->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=PV#pvaluesetdef.name};
+ PO when record(PO,pobjectdef) ->
+ #'Externalvaluereference'{pos=0,
+ module=MName,
+ value=PO#pobjectdef.name}
+ end
+ end,
+ TorVL)
+ end,
+ ModuleList),
+ lists:append(ExpList).
+
+%% common_imports/2
+%% IList is a list of tuples, {Imports,MName}, where Imports is the imports of
+%% the module with name MName.
+%% InputMNameL holds the names of all merged modules.
+%% Returns an import tuple with a list of imports that are external to the
+%% merged set of modules.
+common_imports(IList,InputMNameL) ->
+ SetExternalImportsList = remove_in_set_imports(IList,InputMNameL,[]),
+ {imports,remove_import_doubles(SetExternalImportsList)}.
+
+check_tagdefault(ModList) ->
+ case have_same_tagdefault(ModList) of
+ {true,TagDefault} -> TagDefault;
+ {false,TagDefault} ->
+ create_ets_table(automatic_tags,[named_table]),
+ save_automatic_tagged_types(ModList),
+ TagDefault
+ end.
+
+have_same_tagdefault([#module{tagdefault=T}|Ms]) ->
+ have_same_tagdefault(Ms,{true,T}).
+
+have_same_tagdefault([],TagDefault) ->
+ TagDefault;
+have_same_tagdefault([#module{tagdefault=T}|Ms],TDefault={_,T}) ->
+ have_same_tagdefault(Ms,TDefault);
+have_same_tagdefault([#module{tagdefault=T1}|Ms],{_,T2}) ->
+ have_same_tagdefault(Ms,{false,rank_tagdef([T1,T2])}).
+
+rank_tagdef(L) ->
+ case lists:member('EXPLICIT',L) of
+ true -> 'EXPLICIT';
+ _ -> 'IMPLICIT'
+ end.
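+
+%% Example: rank_tagdef(['IMPLICIT','EXPLICIT']) -> 'EXPLICIT', i.e. when the
+%% merged modules disagree, 'EXPLICIT' wins if any module uses it, otherwise
+%% 'IMPLICIT' is used; modules declared with 'AUTOMATIC' tags additionally get
+%% their definition names recorded by save_automatic_tagged_types/1 below.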
+
+save_automatic_tagged_types([])->
+ done;
+save_automatic_tagged_types([#module{tagdefault='AUTOMATIC',
+ typeorval=TorV}|Ms]) ->
+ Fun =
+ fun(T) ->
+ ets:insert(automatic_tags,{get_name_of_def(T)})
+ end,
+ lists:foreach(Fun,TorV),
+ save_automatic_tagged_types(Ms);
+save_automatic_tagged_types([_M|Ms]) ->
+ save_automatic_tagged_types(Ms).
+
+%% remove_in_set_imports/3 :
+%% input: a list of tuples with each module's imports and module name,
+%% respectively.
+%% output: one flat list of the remaining #'SymbolsFromModule' imports, where
+%% every import from a module in the input set (InputMNameL) is removed.
+remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) ->
+ NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]),
+ remove_in_set_imports(Rest,InputMNameL,NewImpL++Acc);
+remove_in_set_imports([],_,Acc) ->
+ lists:reverse(Acc).
+
+remove_in_set_imports1([I|Is],InputMNameL,Acc) ->
+ case I#'SymbolsFromModule'.module of
+ #'Externaltypereference'{type=MName} ->
+ case lists:member(MName,InputMNameL) of
+ true ->
+ remove_in_set_imports1(Is,InputMNameL,Acc);
+ false ->
+ remove_in_set_imports1(Is,InputMNameL,[I|Acc])
+ end;
+ _ ->
+ remove_in_set_imports1(Is,InputMNameL,[I|Acc])
+ end;
+remove_in_set_imports1([],_,Acc) ->
+ lists:reverse(Acc).
+
+remove_import_doubles([]) ->
+ [];
+%% If several modules in the merge set import symbols from
+%% the same external module, those imports might be duplicated.
+%% ImportList has #'SymbolsFromModule' elements.
+remove_import_doubles(ImportList) ->
+ MergedImportList =
+ merge_symbols_from_module(ImportList,[]),
+%% io:format("MergedImportList: ~p~n",[MergedImportList]),
+ delete_double_of_symbol(MergedImportList,[]).
+
+merge_symbols_from_module([Imp|Imps],Acc) ->
+ #'Externaltypereference'{type=ModName} = Imp#'SymbolsFromModule'.module,
+ IfromModName =
+ lists:filter(
+ fun(I)->
+ case I#'SymbolsFromModule'.module of
+ #'Externaltypereference'{type=ModName} ->
+ true;
+ #'Externalvaluereference'{value=ModName} ->
+ true;
+ _ -> false
+ end
+ end,
+ Imps),
+ NewImps = lists:subtract(Imps,IfromModName),
+%% io:format("Imp: ~p~nIfromModName: ~p~n",[Imp,IfromModName]),
+ NewImp =
+ Imp#'SymbolsFromModule'{
+ symbols = lists:append(
+ lists:map(fun(SL)->
+ SL#'SymbolsFromModule'.symbols
+ end,[Imp|IfromModName]))},
+ merge_symbols_from_module(NewImps,[NewImp|Acc]);
+merge_symbols_from_module([],Acc) ->
+ lists:reverse(Acc).
+
+delete_double_of_symbol([I|Is],Acc) ->
+ SymL=I#'SymbolsFromModule'.symbols,
+ NewSymL = delete_double_of_symbol1(SymL,[]),
+ delete_double_of_symbol(Is,[I#'SymbolsFromModule'{symbols=NewSymL}|Acc]);
+delete_double_of_symbol([],Acc) ->
+ Acc.
+
+delete_double_of_symbol1([TRef=#'Externaltypereference'{type=TrefName}|Rest],Acc)->
+ NewRest =
+ lists:filter(fun(S)->
+ case S of
+ #'Externaltypereference'{type=TrefName}->
+ false;
+ _ -> true
+ end
+ end,
+ Rest),
+ delete_double_of_symbol1(NewRest,[TRef|Acc]);
+delete_double_of_symbol1([VRef=#'Externalvaluereference'{value=VName}|Rest],Acc) ->
+ NewRest =
+ lists:filter(fun(S)->
+ case S of
+ #'Externalvaluereference'{value=VName}->
+ false;
+ _ -> true
+ end
+ end,
+ Rest),
+ delete_double_of_symbol1(NewRest,[VRef|Acc]);
+delete_double_of_symbol1([TRef={#'Externaltypereference'{type=MRef},
+ #'Externaltypereference'{type=TRef}}|Rest],
+ Acc)->
+ NewRest =
+ lists:filter(
+ fun(S)->
+ case S of
+ {#'Externaltypereference'{type=MRef},
+ #'Externaltypereference'{type=TRef}}->
+ false;
+ _ -> true
+ end
+ end,
+ Rest),
+ delete_double_of_symbol1(NewRest,[TRef|Acc]);
+delete_double_of_symbol1([],Acc) ->
+ Acc.
+
+
+scan_set(DirName,Files,Options) ->
+ lists:map(
+ fun(F)->
+ case scan({true,true},filename:join([DirName,F]),Options) of
+ {false,{error,Reason}} ->
+ throw({error,{'scan error in file:',F,Reason}});
+ {TrueOrFalse,Res} ->
+ {TrueOrFalse,Res,F}
+ end
+ end,
+ Files).
+
+parse_set(ScanRes,Options) ->
+ lists:map(
+ fun({TorF,Toks,F})->
+ case parse({TorF,Toks},F,Options) of
+ {false,{error,Reason}} ->
+ throw({error,{'parse error in file:',F,Reason}});
+ {TrueOrFalse,Res} ->
+ {TrueOrFalse,Res,F}
+ end
+ end,
+ ScanRes).
+
+
+%%***********************************
+
+
+scan({true,_}, File,Options) ->
+ case asn1ct_tok:file(File) of
+ {error,Reason} ->
+ io:format("~p~n",[Reason]),
+ {false,{error,Reason}};
+ Tokens ->
+ case lists:member(ss,Options) of
+ true -> % we terminate after scan
+ {false,Tokens};
+ false -> % continue with next pass
+ {true,Tokens}
+ end
+ end;
+scan({false,Result},_,_) ->
+ Result.
+
+
+parse({true,Tokens},File,Options) ->
+ %Presult = asn1ct_parser2:parse(Tokens),
+ %%case lists:member(p1,Options) of
+ %% true ->
+ %% asn1ct_parser:parse(Tokens);
+ %% _ ->
+ %% asn1ct_parser2:parse(Tokens)
+ %% end,
+ case catch asn1ct_parser2:parse(Tokens) of
+ {error,{{Line,_Mod,Message},_TokTup}} ->
+ if
+ integer(Line) ->
+ BaseName = filename:basename(File),
+ io:format("syntax error at line ~p in module ~s:~n",
+ [Line,BaseName]);
+ true ->
+ io:format("syntax error in module ~p:~n",[File])
+ end,
+ print_error_message(Message),
+ {false,{error,Message}};
+ {error,{Line,_Mod,[Message,Token]}} ->
+ io:format("syntax error: ~p ~p at line ~p~n",
+ [Message,Token,Line]),
+ {false,{error,{Line,[Message,Token]}}};
+ {ok,M} ->
+ case lists:member(sp,Options) of
+ true -> % terminate after parse
+ {false,M};
+ false -> % continue with next pass
+ {true,M}
+ end;
+ OtherError ->
+ io:format("~p~n",[OtherError])
+ end;
+parse({false,Tokens},_,_) ->
+ {false,Tokens}.
+
+check({true,M},File,OutFile,Includes,EncodingRule,DbFile,Options,InputMods) ->
+ cmp(M#module.name,File),
+ start(["."|Includes]),
+ case asn1ct_check:storeindb(M) of
+ ok ->
+ Module = asn1_db:dbget(M#module.name,'MODULE'),
+ State = #state{mname=Module#module.name,
+ module=Module#module{typeorval=[]},
+ erule=EncodingRule,
+ inputmodules=InputMods,
+ options=Options},
+ Check = asn1ct_check:check(State,Module#module.typeorval),
+ case {Check,lists:member(abs,Options)} of
+ {{error,Reason},_} ->
+ {false,{error,Reason}};
+ {{ok,NewTypeOrVal,_},true} ->
+ NewM = Module#module{typeorval=NewTypeOrVal},
+ asn1_db:dbput(NewM#module.name,'MODULE',NewM),
+ pretty2(M#module.name,lists:concat([OutFile,".abs"])),
+ {false,ok};
+ {{ok,NewTypeOrVal,GenTypeOrVal},_} ->
+ NewM = Module#module{typeorval=NewTypeOrVal},
+ asn1_db:dbput(NewM#module.name,'MODULE',NewM),
+ asn1_db:dbsave(DbFile,M#module.name),
+ io:format("--~p--~n",[{generated,DbFile}]),
+ {true,{M,NewM,GenTypeOrVal}}
+ end
+ end;
+check({false,M},_,_,_,_,_,_,_) ->
+ {false,M}.
+
+generate({true,{M,_Module,GenTOrV}},OutFile,EncodingRule,Options) ->
+ debug_on(Options),
+ case lists:member(compact_bit_string,Options) of
+ true -> put(compact_bit_string,true);
+ _ -> ok
+ end,
+ put(encoding_options,Options),
+ create_ets_table(check_functions,[named_table]),
+
+ %% create decoding function names and taglists for partial decode
+ %% For the time being leave errors unnoticed !!!!!!!!!
+% io:format("Options: ~p~n",[Options]),
+ case catch specialized_decode_prepare(EncodingRule,M,GenTOrV,Options) of
+ {error, enoent} -> ok;
+ {error, Reason} -> io:format("WARNING: Error in configuration"
+ "file: ~n~p~n",[Reason]);
+ {'EXIT',Reason} -> io:format("WARNING: Internal error when "
+ "analyzing configuration"
+ "file: ~n~p~n",[Reason]);
+ _ -> ok
+ end,
+
+ asn1ct_gen:pgen(OutFile,EncodingRule,M#module.name,GenTOrV),
+ debug_off(Options),
+ put(compact_bit_string,false),
+ erase(encoding_options),
+ erase(tlv_format), % used in ber_bin, optimize
+ erase(class_default_type),% used in ber_bin, optimize
+ ets:delete(check_functions),
+ case lists:member(sg,Options) of
+ true -> % terminate here , with .erl file generated
+ {false,true};
+ false ->
+ {true,true}
+ end;
+generate({false,M},_,_,_) ->
+ {false,M}.
+
+compile_erl({true,_},OutFile,Options) ->
+ erl_compile(OutFile,Options);
+compile_erl({false,true},_,_) ->
+ ok;
+compile_erl({false,Result},_,_) ->
+ Result.
+
+input_file_type([]) ->
+ {empty_name,[]};
+input_file_type(File) ->
+ case filename:extension(File) of
+ [] ->
+ case file:read_file_info(lists:concat([File,".asn1"])) of
+ {ok,_FileInfo} ->
+ {single_file, lists:concat([File,".asn1"])};
+ _Error ->
+ case file:read_file_info(lists:concat([File,".asn"])) of
+ {ok,_FileInfo} ->
+ {single_file, lists:concat([File,".asn"])};
+ _Error ->
+ {single_file, lists:concat([File,".py"])}
+ end
+ end;
+ ".asn1config" ->
+ case read_config_file(File,asn1_module) of
+ {ok,Asn1Module} ->
+ put(asn1_config_file,File),
+ input_file_type(Asn1Module);
+ Error ->
+ Error
+ end;
+ Asn1PFix ->
+ Base = filename:basename(File,Asn1PFix),
+ case filename:extension(Base) of
+ [] ->
+ {single_file,File};
+ SetPFix when (SetPFix == ".set") ->
+ {multiple_files_file,
+ filename:basename(Base,SetPFix),
+ File};
+ _Error ->
+ throw({input_file_error,{'Bad input file',File}})
+ end
+ end.
+
+get_file_list(File) ->
+ case file:open(File, [read]) of
+ {error,Reason} ->
+ {error,{File,file:format_error(Reason)}};
+ {ok,Stream} ->
+ get_file_list1(Stream,[])
+ end.
+
+get_file_list1(Stream,Acc) ->
+ Ret = io:get_line(Stream,''),
+ case Ret of
+ eof ->
+ file:close(Stream),
+ lists:reverse(Acc);
+ FileName ->
+ PrefixedNameList =
+ case (catch input_file_type(lists:delete($\n,FileName))) of
+ {empty_name,[]} -> [];
+ {single_file,Name} -> [Name];
+ {multiple_files_file,Name} ->
+ get_file_list(Name);
+ Err = {input_file_error,_Reason} ->
+ throw(Err)
+ end,
+ get_file_list1(Stream,PrefixedNameList++Acc)
+ end.
+
+get_rule(Options) ->
+ case [Rule ||Rule <-[per,ber,ber_bin,ber_bin_v2,per_bin],
+ Opt <- Options,
+ Rule==Opt] of
+ [Rule] ->
+ Rule;
+ [Rule|_] ->
+ Rule;
+ [] ->
+ ber
+ end.
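+
+%% Example: get_rule([debug,per_bin]) -> per_bin, while get_rule([debug])
+%% falls back to the default rule ber. If several rules are present, the
+%% first one in the order per, ber, ber_bin, ber_bin_v2, per_bin is used.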
+
+erl_compile(OutFile,Options) ->
+% io:format("Options:~n~p~n",[Options]),
+ case lists:member(noobj,Options) of
+ true ->
+ ok;
+ _ ->
+ ErlOptions = remove_asn_flags(Options),
+ case c:c(OutFile,ErlOptions) of
+ {ok,_Module} ->
+ ok;
+ _ ->
+ {error,'no_compilation'}
+ end
+ end.
+
+remove_asn_flags(Options) ->
+ [X || X <- Options,
+ X /= get_rule(Options),
+ X /= optimize,
+ X /= compact_bit_string,
+ X /= debug,
+ X /= keyed_list].
+
+debug_on(Options) ->
+ case lists:member(debug,Options) of
+ true ->
+ put(asndebug,true);
+ _ ->
+ true
+ end,
+ case lists:member(keyed_list,Options) of
+ true ->
+ put(asn_keyed_list,true);
+ _ ->
+ true
+ end.
+
+
+debug_off(_Options) ->
+ erase(asndebug),
+ erase(asn_keyed_list).
+
+
+outfile(Base, Ext, Opts) when atom(Ext) ->
+ outfile(Base, atom_to_list(Ext), Opts);
+outfile(Base, Ext, Opts) ->
+ Obase = case lists:keysearch(outdir, 1, Opts) of
+ {value, {outdir, Odir}} -> filename:join(Odir, Base);
+ _NotFound -> Base % Not found or bad format
+ end,
+ case Ext of
+ [] ->
+ Obase;
+ _ ->
+ Obase++"."++Ext
+ end.
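+
+%% Example (hypothetical paths): outfile("Mod","erl",[{outdir,"gen"}]) returns
+%% "gen/Mod.erl", and outfile("Mod",[],[]) returns just "Mod".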
+
+%% compile(AbsFileName, Options)
+%% Compile entry point for erl_compile.
+
+compile_asn(File,OutFile,Options) ->
+ compile(lists:concat([File,".asn"]),OutFile,Options).
+
+compile_asn1(File,OutFile,Options) ->
+ compile(lists:concat([File,".asn1"]),OutFile,Options).
+
+compile_py(File,OutFile,Options) ->
+ compile(lists:concat([File,".py"]),OutFile,Options).
+
+compile(File, _OutFile, Options) ->
+ case catch compile(File, make_erl_options(Options)) of
+ Exit = {'EXIT',_Reason} ->
+ io:format("~p~n~s~n",[Exit,"error"]),
+ error;
+ {error,_Reason} ->
+ %% case occurs due to error in asn1ct_parser2,asn1ct_check
+%% io:format("~p~n",[_Reason]),
+%% io:format("~p~n~s~n",[_Reason,"error"]),
+ error;
+ ok ->
+ io:format("ok~n"),
+ ok;
+ ParseRes when tuple(ParseRes) ->
+ io:format("~p~n",[ParseRes]),
+ ok;
+ ScanRes when list(ScanRes) ->
+ io:format("~p~n",[ScanRes]),
+ ok;
+ Unknown ->
+ io:format("~p~n~s~n",[Unknown,"error"]),
+ error
+ end.
+
+%% Converts generic compiler options to specific options.
+
+make_erl_options(Opts) ->
+
+ %% This way of extracting will work even if the record passed
+ %% has more fields than known during compilation.
+
+ Includes = Opts#options.includes,
+ Defines = Opts#options.defines,
+ Outdir = Opts#options.outdir,
+%% Warning = Opts#options.warning,
+ Verbose = Opts#options.verbose,
+ Specific = Opts#options.specific,
+ Optimize = Opts#options.optimize,
+ OutputType = Opts#options.output_type,
+ Cwd = Opts#options.cwd,
+
+ Options =
+ case Verbose of
+ true -> [verbose];
+ false -> []
+ end ++
+%%% case Warning of
+%%% 0 -> [];
+%%% _ -> [report_warnings]
+%%% end ++
+ [] ++
+ case Optimize of
+ 1 -> [optimize];
+ 999 -> [];
+ _ -> [{optimize,Optimize}]
+ end ++
+ lists:map(
+ fun ({Name, Value}) ->
+ {d, Name, Value};
+ (Name) ->
+ {d, Name}
+ end,
+ Defines) ++
+ case OutputType of
+ undefined -> [ber]; % temporary default (ber when it's ready)
+ ber -> [ber];
+ ber_bin -> [ber_bin];
+ ber_bin_v2 -> [ber_bin_v2];
+ per -> [per];
+ per_bin -> [per_bin]
+ end,
+
+ Options++[report_errors, {cwd, Cwd}, {outdir, Outdir}|
+ lists:map(fun(Dir) -> {i, Dir} end, Includes)]++Specific.
+
+pretty2(Module,AbsFile) ->
+ start(),
+ {ok,F} = file:open(AbsFile, [write]),
+ M = asn1_db:dbget(Module,'MODULE'),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.defid)]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.tagdefault)]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.exports)]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.imports)]),
+ io:format(F,"~s\n\n",[asn1ct_pretty_format:term(M#module.extensiondefault)]),
+
+ {Types,Values,ParameterizedTypes,Classes,Objects,ObjectSets} = M#module.typeorval,
+ io:format(F,"%%%%%%%%%%%%%%%%%%% TYPES in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Types),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% VALUES in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Values),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Parameterized Types in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,ParameterizedTypes),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Classes in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Classes),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Objects in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Objects),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Object Sets in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,ObjectSets).
+start() ->
+ Includes = ["."],
+ start(Includes).
+
+
+start(Includes) when list(Includes) ->
+ asn1_db:dbstart(Includes).
+
+stop() ->
+ save(),
+ asn1_db:stop_server(ns),
+ asn1_db:stop_server(rand),
+ stopped.
+
+save() ->
+ asn1_db:dbstop().
+
+%%clear() ->
+%% asn1_db:dbclear().
+
+encode(Module,Term) ->
+ asn1rt:encode(Module,Term).
+
+encode(Module,Type,Term) when list(Module) ->
+ asn1rt:encode(list_to_atom(Module),Type,Term);
+encode(Module,Type,Term) ->
+ asn1rt:encode(Module,Type,Term).
+
+decode(Module,Type,Bytes) when list(Module) ->
+ asn1rt:decode(list_to_atom(Module),Type,Bytes);
+decode(Module,Type,Bytes) ->
+ asn1rt:decode(Module,Type,Bytes).
+
+
+test(Module) ->
+ start(),
+ M = asn1_db:dbget(Module,'MODULE'),
+ {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets} = M#module.typeorval,
+ test_each(Module,Types).
+
+test_each(Module,[Type | Rest]) ->
+ case test(Module,Type) of
+ {ok,_Result} ->
+ test_each(Module,Rest);
+ Error ->
+ Error
+ end;
+test_each(_,[]) ->
+ ok.
+
+test(Module,Type) ->
+ io:format("~p:~p~n",[Module,Type]),
+ case (catch value(Module,Type)) of
+ {ok,Val} ->
+ %% io:format("asn1ct:test/2: ~w~n",[Val]),
+ test(Module,Type,Val);
+ {'EXIT',Reason} ->
+ {error,{asn1,{value,Reason}}}
+ end.
+
+
+test(Module,Type,Value) ->
+ case catch encode(Module,Type,Value) of
+ {ok,Bytes} ->
+ %% io:format("test 1: ~p~n",[{Bytes}]),
+ M = if
+ list(Module) ->
+ list_to_atom(Module);
+ true ->
+ Module
+ end,
+ NewBytes =
+ case M:encoding_rule() of
+ ber ->
+ lists:flatten(Bytes);
+ ber_bin when binary(Bytes) ->
+ Bytes;
+ ber_bin ->
+ list_to_binary(Bytes);
+ ber_bin_v2 when binary(Bytes) ->
+ Bytes;
+ ber_bin_v2 ->
+ list_to_binary(Bytes);
+ per ->
+ lists:flatten(Bytes);
+ per_bin when binary(Bytes) ->
+ Bytes;
+ per_bin ->
+ list_to_binary(Bytes)
+ end,
+ case decode(Module,Type,NewBytes) of
+ {ok,Value} ->
+ {ok,{Module,Type,Value}};
+ {ok,Res} ->
+ {error,{asn1,{encode_decode_mismatch,
+ {{Module,Type,Value},Res}}}};
+ Error ->
+ {error,{asn1,{{decode,
+ {Module,Type,Value},Error}}}}
+ end;
+ Error ->
+ {error,{asn1,{encode,{{Module,Type,Value},Error}}}}
+ end.
+
+value(Module) ->
+ start(),
+ M = asn1_db:dbget(Module,'MODULE'),
+ {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets} = M#module.typeorval,
+ lists:map(fun(A) ->value(Module,A) end,Types).
+
+value(Module,Type) ->
+ start(),
+ case catch asn1ct_value:get_type(Module,Type,no) of
+ {error,Reason} ->
+ {error,Reason};
+ {'EXIT',Reason} ->
+ {error,Reason};
+ Result ->
+ {ok,Result}
+ end.
+
+cmp(Module,InFile) ->
+ Base = filename:basename(InFile),
+ Dir = filename:dirname(InFile),
+ Ext = filename:extension(Base),
+ Finfo = file:read_file_info(InFile),
+ Minfo = file:read_file_info(filename:join(Dir,lists:concat([Module,Ext]))),
+ case Finfo of
+ Minfo ->
+ ok;
+ _ ->
+ io:format("asn1error: Modulename and filename must be equal~n",[]),
+ throw(error)
+ end.
+
+vsn() ->
+ ?vsn.
+
+print_error_message([got,H|T]) when list(H) ->
+ io:format(" got:"),
+ print_listing(H,"and"),
+ print_error_message(T);
+print_error_message([expected,H|T]) when list(H) ->
+ io:format(" expected one of:"),
+ print_listing(H,"or"),
+ print_error_message(T);
+print_error_message([H|T]) ->
+ io:format(" ~p",[H]),
+ print_error_message(T);
+print_error_message([]) ->
+ io:format("~n").
+
+print_listing([H1,H2|[]],AndOr) ->
+ io:format(" ~p ~s ~p",[H1,AndOr,H2]);
+print_listing([H1,H2|T],AndOr) ->
+ io:format(" ~p,",[H1]),
+ print_listing([H2|T],AndOr);
+print_listing([H],_AndOr) ->
+ io:format(" ~p",[H]);
+print_listing([],_) ->
+ ok.
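+
+%% Example: print_listing([a,b,c],"or") prints " a, b or c"; this is how the
+%% "expected one of:" part of a parse error message is rendered.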
+
+
+%% functions to administer ets tables
+
+%% Always creates a new table
+create_ets_table(Name,Options) when atom(Name) ->
+ case ets:info(Name) of
+ undefined ->
+ ets:new(Name,Options);
+ _ ->
+ ets:delete(Name),
+ ets:new(Name,Options)
+ end.
+
+%% Creates a new ets table only if no table exists
+create_if_no_table(Name,Options) ->
+ case ets:info(Name) of
+ undefined ->
+ %% create a new table
+ create_ets_table(Name,Options);
+ _ -> ok
+ end.
+
+
+delete_tables([Table|Ts]) ->
+ case ets:info(Table) of
+ undefined -> ok;
+ _ -> ets:delete(Table)
+ end,
+ delete_tables(Ts);
+delete_tables([]) ->
+ ok.
+
+
+specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
+% Asn1confMember =
+% fun([{asn1config,File}|_],_) ->
+% {true,File};
+% ([],_) -> false;
+% ([_H|T],Fun) ->
+% Fun(T,Fun)
+% end,
+% case Asn1confMember(Options,Asn1confMember) of
+% {true,File} ->
+ case lists:member(asn1config,Options) of
+ true ->
+ partial_decode_prepare(Erule,M,TsAndVs,Options);
+ _ ->
+ ok
+ end.
+%% Reads the configuration file if it exists and stores information
+%% about partial decode and incomplete decode
+partial_decode_prepare(ber_bin_v2,M,TsAndVs,Options) when tuple(TsAndVs) ->
+ %% read configure file
+% Types = element(1,TsAndVs),
+ CfgList = read_config_file(M#module.name),
+ SelectedDecode = get_config_info(CfgList,partial_decode),
+ ExclusiveDecode = get_config_info(CfgList,exclusive_decode),
+ CommandList =
+ create_partial_decode_gen_info(M#module.name,SelectedDecode),
+% io:format("partial_decode = ~p~n",[CommandList]),
+
+ save_config(partial_decode,CommandList),
+ CommandList2 =
+ create_partial_inc_decode_gen_info(M#module.name,ExclusiveDecode),
+% io:format("partial_incomplete_decode = ~p~n",[CommandList2]),
+ Part_inc_tlv_tags = tag_format(ber_bin_v2,Options,CommandList2),
+% io:format("partial_incomplete_decode: tlv_tags = ~p~n",[Part_inc_tlv_tags]),
+ save_config(partial_incomplete_decode,Part_inc_tlv_tags),
+ save_gen_state(ExclusiveDecode,Part_inc_tlv_tags);
+partial_decode_prepare(_,_,_,_) ->
+ ok.
+
+
+
+%% create_partial_inc_decode_gen_info/2
+%%
+%% Creates a list of tags out of the information in TypeNameList that
+%% tells which values will be incompletely decoded, i.e. each end
+%% component/type in TypeNameList. The significant types/components in
+%% the path from the top type must be specified in the
+%% TypeNameList. Significant elements are all constructed types that
+%% branch the path to the leaf, and the leaf itself.
+%%
+%% Returns a list of elements, where an element may be one of
+%% mandatory|[opt,Tag]|[bin,Tag]. mandatory corresponds to a mandatory
+%% element that shall be decoded as usual. [opt,Tag] matches an
+%% OPTIONAL or DEFAULT element that shall be decoded as
+%% usual. [bin,Tag] corresponds to an element, mandatory, OPTIONAL or
+%% DEFAULT, that shall be left encoded (incompletely decoded).
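+%%
+%% A hypothetical sketch (all names made up) of a matching exclusive_decode
+%% configuration term:
+%%   {exclusive_decode,{'MyModule',[{decode_Top_exclusive,
+%%                                   ['Top',[{header,undecoded}]]}]}}
+%% assuming 'MyModule' is the ASN.1 module being compiled and 'Top' a
+%% SEQUENCE with a component 'header'; the 'header' value would then be left
+%% encoded, i.e. get the [bin,Tag] style command described above. The exact
+%% nesting is what the clauses below pattern match on.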
+create_partial_inc_decode_gen_info(ModName,{Mod,[{Name,L}|Ls]}) when list(L) ->
+ TopTypeName = partial_inc_dec_toptype(L),
+ [{Name,TopTypeName,
+ create_partial_inc_decode_gen_info1(ModName,TopTypeName,{Mod,L})}|
+ create_partial_inc_decode_gen_info(ModName,{Mod,Ls})];
+create_partial_inc_decode_gen_info(_,{_,[]}) ->
+ [];
+create_partial_inc_decode_gen_info(_,[]) ->
+ [].
+
+create_partial_inc_decode_gen_info1(ModName,TopTypeName,{ModName,
+ [_TopType|Rest]}) ->
+ case asn1_db:dbget(ModName,TopTypeName) of
+ #typedef{typespec=TS} ->
+ TagCommand = get_tag_command(TS,?MANDATORY,mandatory),
+ create_pdec_inc_command(ModName,get_components(TS#type.def),
+ Rest,[TagCommand]);
+ _ ->
+ throw({error,{"wrong type list in asn1 config file",
+ TopTypeName}})
+ end;
+create_partial_inc_decode_gen_info1(M1,_,{M2,_}) when M1 /= M2 ->
+ throw({error,{"wrong module name in asn1 config file",
+ M2}});
+create_partial_inc_decode_gen_info1(_,_,TNL) ->
+ throw({error,{"wrong type list in asn1 config file",
+ TNL}}).
+
+%%
+%% Only when there is a 'ComponentType' may the config data C1 be a
+%% list, in which case the incomplete decode branches. So, C1 may be a
+%% list, a "binary tuple", a "parts tuple" or an atom. The second
+%% element of a binary tuple and of a parts tuple is an atom.
+create_pdec_inc_command(_ModName,_,[],Acc) ->
+ lists:reverse(Acc);
+create_pdec_inc_command(ModName,{Comps1,Comps2},TNL,Acc)
+ when list(Comps1),list(Comps2) ->
+ create_pdec_inc_command(ModName,Comps1 ++ Comps2,TNL,Acc);
+create_pdec_inc_command(ModN,Clist,[CL|_Rest],Acc) when list(CL) ->
+ create_pdec_inc_command(ModN,Clist,CL,Acc);
+create_pdec_inc_command(ModName,
+ CList=[#'ComponentType'{name=Name,typespec=TS,
+ prop=Prop}|Comps],
+ TNL=[C1|Cs],Acc) ->
+ case C1 of
+% Name ->
+% %% In this case C1 is an atom
+% TagCommand = get_tag_command(TS,?MANDATORY,Prop),
+% create_pdec_inc_command(ModName,get_components(TS#type.def),Cs,[TagCommand|Acc]);
+ {Name,undecoded} ->
+ TagCommand = get_tag_command(TS,?UNDECODED,Prop),
+ create_pdec_inc_command(ModName,Comps,Cs,[TagCommand|Acc]);
+ {Name,parts} ->
+ TagCommand = get_tag_command(TS,?PARTS,Prop),
+ create_pdec_inc_command(ModName,Comps,Cs,[TagCommand|Acc]);
+ L when list(L) ->
+ %% This case is only possible as the first element after
+	    %% the top type element, when the top type is SEQUENCE or SET.
+ %% Follow each element in L. Must note every tag on the
+ %% way until the last command is reached, but it ought to
+ %% be enough to have a "complete" or "complete optional"
+ %% command for each component that is not specified in the
+ %% config file. Then in the TLV decode the components with
+ %% a "complete" command will be decoded by an ordinary TLV
+ %% decode.
+ create_pdec_inc_command(ModName,CList,L,Acc);
+ {Name,RestPartsList} when list(RestPartsList) ->
+ %% Same as previous, but this may occur at any place in
+ %% the structure. The previous is only possible as the
+ %% second element.
+ case get_tag_command(TS,?MANDATORY,Prop) of
+ ?MANDATORY ->
+ InnerDirectives=
+ create_pdec_inc_command(ModName,TS#type.def,
+ RestPartsList,[]),
+ create_pdec_inc_command(ModName,Comps,Cs,
+ [[?MANDATORY,InnerDirectives]|Acc]);
+% create_pdec_inc_command(ModName,Comps,Cs,
+% [InnerDirectives,?MANDATORY|Acc]);
+ [Opt,EncTag] ->
+ InnerDirectives =
+ create_pdec_inc_command(ModName,TS#type.def,
+ RestPartsList,[]),
+ create_pdec_inc_command(ModName,Comps,Cs,
+ [[Opt,EncTag,InnerDirectives]|Acc])
+ end;
+% create_pdec_inc_command(ModName,CList,RestPartsList,Acc);
+%% create_pdec_inc_command(ModName,TS#type.def,RestPartsList,Acc);
+ _ -> %% this component may not be in the config list
+ TagCommand = get_tag_command(TS,?MANDATORY,Prop),
+ create_pdec_inc_command(ModName,Comps,TNL,[TagCommand|Acc])
+ end;
+create_pdec_inc_command(ModName,
+ {'CHOICE',[#'ComponentType'{name=C1,
+ typespec=TS,
+ prop=Prop}|Comps]},
+ [{C1,Directive}|Rest],Acc) ->
+ case Directive of
+ List when list(List) ->
+ [Command,Tag] = get_tag_command(TS,?ALTERNATIVE,Prop),
+ CompAcc = create_pdec_inc_command(ModName,TS#type.def,List,[]),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},Rest,
+ [[Command,Tag,CompAcc]|Acc]);
+ undecoded ->
+ TagCommand = get_tag_command(TS,?ALTERNATIVE_UNDECODED,Prop),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},Rest,
+ [TagCommand|Acc]);
+ parts ->
+ TagCommand = get_tag_command(TS,?ALTERNATIVE_PARTS,Prop),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},Rest,
+ [TagCommand|Acc])
+ end;
+create_pdec_inc_command(ModName,
+ {'CHOICE',[#'ComponentType'{typespec=TS,
+ prop=Prop}|Comps]},
+ TNL,Acc) ->
+ TagCommand = get_tag_command(TS,?ALTERNATIVE,Prop),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},TNL,[TagCommand|Acc]);
+create_pdec_inc_command(M,{'CHOICE',{Cs1,Cs2}},TNL,Acc)
+ when list(Cs1),list(Cs2) ->
+ create_pdec_inc_command(M,{'CHOICE',Cs1 ++ Cs2},TNL,Acc);
+create_pdec_inc_command(ModName,#'Externaltypereference'{module=M,type=Name},
+ TNL,Acc) ->
+ #type{def=Def} = get_referenced_type(M,Name),
+ create_pdec_inc_command(ModName,get_components(Def),TNL,Acc);
+create_pdec_inc_command(_,_,TNL,_) ->
+ throw({error,{"unexpected error when creating partial "
+ "decode command",TNL}}).
+
+partial_inc_dec_toptype([T|_]) when atom(T) ->
+ T;
+partial_inc_dec_toptype([{T,_}|_]) when atom(T) ->
+ T;
+partial_inc_dec_toptype([L|_]) when list(L) ->
+ partial_inc_dec_toptype(L);
+partial_inc_dec_toptype(_) ->
+ throw({error,{"no top type found for partial incomplete decode"}}).
+
+
+%% Creates a list of tags out of the information in TypeList and Types
+%% that tells which value will be decoded. Each constructed type that
+%% is in the TypeList will get a "choosen" command. Only the last
+%% type/component in the TypeList may be a primitive type. Components
+%% "on the way" to the final element may get the "skip" or the
+%% "skip_optional" command.
+%% CommandList = [Elements]
+%% Elements = {choosen,Tag}|{skip_optional,Tag}|skip
+%% Tag is a binary with the tag BER encoded.
+create_partial_decode_gen_info(ModName,{{_,ModName},TypeList}) ->
+ case TypeList of
+ [TopType|Rest] ->
+ case asn1_db:dbget(ModName,TopType) of
+ #typedef{typespec=TS} ->
+ TagCommand = get_tag_command(TS,?CHOOSEN),
+ create_pdec_command(ModName,get_components(TS#type.def),
+ Rest,[TagCommand]);
+ _ ->
+ throw({error,{"wrong type list in asn1 config file",
+ TypeList}})
+ end;
+ _ ->
+ []
+ end;
+create_partial_decode_gen_info(_,[]) ->
+ [];
+create_partial_decode_gen_info(_M1,{{_,M2},_}) ->
+ throw({error,{"wrong module name in asn1 config file",
+ M2}}).
+
+%% create_pdec_command/4: for each name (type or component) in the
+%% third argument, TypeNameList, a command is created. The command
+%% states whether the component/type shall be skipped, looked
+%% into or returned. The list of commands is returned.
+create_pdec_command(_ModName,_,[],Acc) ->
+ lists:reverse(Acc);
+create_pdec_command(ModName,[#'ComponentType'{name=C1,typespec=TS}|_Comps],
+ [C1|Cs],Acc) ->
+    %% this component is a constructed type or the last in the
+    %% TypeNameList; otherwise the config spec is wrong
+ TagCommand = get_tag_command(TS,?CHOOSEN),
+ create_pdec_command(ModName,get_components(TS#type.def),
+ Cs,[TagCommand|Acc]);
+create_pdec_command(ModName,[#'ComponentType'{typespec=TS,
+ prop=Prop}|Comps],
+ [C2|Cs],Acc) ->
+ TagCommand =
+ case Prop of
+ mandatory ->
+ get_tag_command(TS,?SKIP);
+ _ ->
+ get_tag_command(TS,?SKIP_OPTIONAL)
+ end,
+ create_pdec_command(ModName,Comps,[C2|Cs],[TagCommand|Acc]);
+create_pdec_command(ModName,{'CHOICE',[Comp=#'ComponentType'{name=C1}|_]},TNL=[C1|_Cs],Acc) ->
+ create_pdec_command(ModName,[Comp],TNL,Acc);
+create_pdec_command(ModName,{'CHOICE',[#'ComponentType'{}|Comps]},TNL,Acc) ->
+ create_pdec_command(ModName,{'CHOICE',Comps},TNL,Acc);
+create_pdec_command(ModName,#'Externaltypereference'{module=M,type=C1},
+ TypeNameList,Acc) ->
+ case get_referenced_type(M,C1) of
+ #type{def=Def} ->
+ create_pdec_command(ModName,get_components(Def),TypeNameList,
+ Acc);
+ Err ->
+ throw({error,{"unexpected result when fetching "
+ "referenced element",Err}})
+ end;
+create_pdec_command(ModName,TS=#type{def=Def},[C1|Cs],Acc) ->
+    %% This is the case when we get the "components" of a SEQUENCE/SET OF
+ case C1 of
+ [1] ->
+ %% A list with an integer is the only valid option in a 'S
+ %% OF', the other valid option would be an empty
+ %% TypeNameList saying that the entire 'S OF' will be
+ %% decoded.
+ TagCommand = get_tag_command(TS,?CHOOSEN),
+ create_pdec_command(ModName,Def,Cs,[TagCommand|Acc]);
+ [N] when integer(N) ->
+ TagCommand = get_tag_command(TS,?SKIP),
+ create_pdec_command(ModName,Def,[[N-1]|Cs],[TagCommand|Acc]);
+ Err ->
+ throw({error,{"unexpected error when creating partial "
+ "decode command",Err}})
+ end;
+create_pdec_command(_,_,TNL,_) ->
+ throw({error,{"unexpected error when creating partial "
+ "decode command",TNL}}).
+
+% get_components({'CHOICE',Components}) ->
+% Components;
+get_components(#'SEQUENCE'{components=Components}) ->
+ Components;
+get_components(#'SET'{components=Components}) ->
+ Components;
+get_components({'SEQUENCE OF',Components}) ->
+ Components;
+get_components({'SET OF',Components}) ->
+ Components;
+get_components(Def) ->
+ Def.
+
+%% get_tag_command(Type,Command)
+
+%% Type is the type that has information about the tag. Command tells
+%% what to do with the encoded value with the tag of Type when
+%% decoding.
+get_tag_command(#type{tag=[]},_) ->
+ [];
+get_tag_command(#type{tag=[_Tag]},?SKIP) ->
+ ?SKIP;
+get_tag_command(#type{tag=[Tag]},Command) ->
+ %% encode the tag according to BER
+ [Command,encode_tag_val(decode_class(Tag#tag.class),Tag#tag.form,
+ Tag#tag.number)];
+get_tag_command(T=#type{tag=[Tag|Tags]},Command) ->
+ [get_tag_command(T#type{tag=Tag},Command)|
+ get_tag_command(T#type{tag=Tags},Command)].
+
+%% get_tag_command/3 used by create_pdec_inc_command
+get_tag_command(#type{tag=[]},_,_) ->
+ [];
+get_tag_command(#type{tag=[Tag]},?MANDATORY,Prop) ->
+ case Prop of
+ mandatory ->
+ ?MANDATORY;
+ {'DEFAULT',_} ->
+ [?DEFAULT,encode_tag_val(decode_class(Tag#tag.class),
+ Tag#tag.form,Tag#tag.number)];
+ _ -> [?OPTIONAL,encode_tag_val(decode_class(Tag#tag.class),
+ Tag#tag.form,Tag#tag.number)]
+ end;
+get_tag_command(#type{tag=[Tag]},Command,_) ->
+ [Command,encode_tag_val(decode_class(Tag#tag.class),Tag#tag.form,
+ Tag#tag.number)].
+
+
+get_referenced_type(M,Name) ->
+ case asn1_db:dbget(M,Name) of
+ #typedef{typespec=TS} ->
+ case TS of
+ #type{def=#'Externaltypereference'{module=M2,type=Name2}} ->
+ %% The tags have already been taken care of in the
+ %% first reference where they were gathered in a
+ %% list of tags.
+ get_referenced_type(M2,Name2);
+ #type{} -> TS;
+ _ ->
+ throw({error,{"unexpected element when"
+ " fetching referenced type",TS}})
+ end;
+ T ->
+ throw({error,{"unexpected element when fetching "
+ "referenced type",T}})
+ end.
+
+tag_format(EncRule,_Options,CommandList) ->
+ case EncRule of
+ ber_bin_v2 ->
+ tlv_tags(CommandList);
+ _ ->
+ CommandList
+ end.
+
+tlv_tags([]) ->
+ [];
+tlv_tags([mandatory|Rest]) ->
+ [mandatory|tlv_tags(Rest)];
+tlv_tags([[Command,Tag]|Rest]) when atom(Command),binary(Tag) ->
+ [[Command,tlv_tag(Tag)]|tlv_tags(Rest)];
+tlv_tags([[Command,Directives]|Rest]) when atom(Command),list(Directives) ->
+ [[Command,tlv_tags(Directives)]|tlv_tags(Rest)];
+%% remove all empty lists
+tlv_tags([[]|Rest]) ->
+ tlv_tags(Rest);
+tlv_tags([{Name,TopType,L1}|Rest]) when list(L1),atom(TopType) ->
+ [{Name,TopType,tlv_tags(L1)}|tlv_tags(Rest)];
+tlv_tags([[Command,Tag,L1]|Rest]) when list(L1),binary(Tag) ->
+ [[Command,tlv_tag(Tag),tlv_tags(L1)]|tlv_tags(Rest)];
+tlv_tags([L=[L1|_]|Rest]) when list(L1) ->
+ [tlv_tags(L)|tlv_tags(Rest)].
+
+tlv_tag(<<Cl:2,_:1,TagNo:5>>) when TagNo < 31 ->
+ (Cl bsl 16) + TagNo;
+tlv_tag(<<Cl:2,_:1,31:5,0:1,TagNo:7>>) ->
+ (Cl bsl 16) + TagNo;
+tlv_tag(<<Cl:2,_:1,31:5,Buffer/binary>>) ->
+ TagNo = tlv_tag1(Buffer,0),
+ (Cl bsl 16) + TagNo.
+tlv_tag1(<<0:1,PartialTag:7>>,Acc) ->
+ (Acc bsl 7) bor PartialTag;
+tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
+ tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
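+
+%% Worked example: a context-specific tag (class bits 2) with tag number 5 is
+%% the single short-form octet <<2:2,0:1,5:5>>, so tlv_tag/1 returns
+%% (2 bsl 16) + 5 = 131077. Tag numbers of 31 or more use the long form and
+%% are accumulated seven bits at a time by tlv_tag1/2.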
+
+%% reads the content from the configuration file and returns the
+%% selected part chosen by InfoType. Assumes that the config file
+%% content consists of Erlang terms.
+read_config_file(ModuleName,InfoType) when atom(InfoType) ->
+ CfgList = read_config_file(ModuleName),
+ get_config_info(CfgList,InfoType).
+
+
+read_config_file(ModuleName) ->
+ case file:consult(lists:concat([ModuleName,'.asn1config'])) of
+% case file:consult(ModuleName) of
+ {ok,CfgList} ->
+ CfgList;
+ {error,enoent} ->
+ Options = get(encoding_options),
+ Includes = [I || {i,I} <- Options],
+ read_config_file1(ModuleName,Includes);
+ {error,Reason} ->
+ file:format_error(Reason),
+ throw({error,{"error reading asn1 config file",Reason}})
+ end.
+read_config_file1(ModuleName,[]) ->
+ case filename:extension(ModuleName) of
+ ".asn1config" ->
+ throw({error,enoent});
+ _ ->
+ read_config_file(lists:concat([ModuleName,".asn1config"]))
+ end;
+read_config_file1(ModuleName,[H|T]) ->
+% File = filename:join([H,lists:concat([ModuleName,'.asn1config'])]),
+ File = filename:join([H,ModuleName]),
+ case file:consult(File) of
+ {ok,CfgList} ->
+ CfgList;
+ {error,enoent} ->
+ read_config_file1(ModuleName,T);
+ {error,Reason} ->
+ file:format_error(Reason),
+ throw({error,{"error reading asn1 config file",Reason}})
+ end.
+
+get_config_info(CfgList,InfoType) ->
+ case InfoType of
+ all ->
+ CfgList;
+ _ ->
+ case lists:keysearch(InfoType,1,CfgList) of
+ {value,{InfoType,Value}} ->
+ Value;
+ false ->
+ []
+ end
+ end.
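+
+%% Example: get_config_info([{exclusive_decode,ED},{partial_decode,PD}],
+%% partial_decode) returns PD, whereas asking for a key that is not in the
+%% list returns [].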
+
+%% save_config/2 saves Info under the key Key.
+%% Before saving anything, make sure the asn1_general table exists.
+save_config(Key,Info) ->
+ create_if_no_table(asn1_general,[named_table]),
+ ets:insert(asn1_general,{{asn1_config,Key},Info}).
+
+read_config_data(Key) ->
+ case ets:info(asn1_general) of
+ undefined -> undefined;
+ _ ->
+ case ets:lookup(asn1_general,{asn1_config,Key}) of
+ [{_,Data}] -> Data;
+ Err ->
+ io:format("strange data from config file ~w~n",[Err]),
+ Err
+ end
+ end.
+
+
+%%
+%% Functions to manipulate the gen_state record saved in the
+%% asn1_general ets table.
+%%
+
+%% saves input data in a new gen_state record
+save_gen_state({_,ConfList},PartIncTlvTagList) ->
+ %ConfList=[{FunctionName,PatternList}|Rest]
+ StateRec = #gen_state{inc_tag_pattern=PartIncTlvTagList,
+ inc_type_pattern=ConfList},
+ save_config(gen_state,StateRec);
+save_gen_state(_,_) ->
+%% ok.
+ save_config(gen_state,#gen_state{}).
+
+save_gen_state(GenState) when record(GenState,gen_state) ->
+ save_config(gen_state,GenState).
+
+
+%% get_gen_state_field returns undefined if no gen_state exists or if
+%% the field is unset; otherwise it returns the data stored in Field.
+get_gen_state_field(Field) ->
+ case read_config_data(gen_state) of
+ undefined ->
+ undefined;
+ GenState ->
+ get_gen_state_field(GenState,Field)
+ end.
+get_gen_state_field(#gen_state{active=Active},active) ->
+ Active;
+get_gen_state_field(_,active) ->
+ false;
+get_gen_state_field(GS,prefix) ->
+ GS#gen_state.prefix;
+get_gen_state_field(GS,inc_tag_pattern) ->
+ GS#gen_state.inc_tag_pattern;
+get_gen_state_field(GS,tag_pattern) ->
+ GS#gen_state.tag_pattern;
+get_gen_state_field(GS,inc_type_pattern) ->
+ GS#gen_state.inc_type_pattern;
+get_gen_state_field(GS,type_pattern) ->
+ GS#gen_state.type_pattern;
+get_gen_state_field(GS,func_name) ->
+ GS#gen_state.func_name;
+get_gen_state_field(GS,namelist) ->
+ GS#gen_state.namelist;
+get_gen_state_field(GS,tobe_refed_funcs) ->
+ GS#gen_state.tobe_refed_funcs;
+get_gen_state_field(GS,gen_refed_funcs) ->
+ GS#gen_state.gen_refed_funcs.
+
+
+get_gen_state() ->
+ read_config_data(gen_state).
+
+
+update_gen_state(Field,Data) ->
+ case get_gen_state() of
+ State when record(State,gen_state) ->
+ update_gen_state(Field,State,Data);
+ _ ->
+ exit({error,{asn1,{internal,
+ "tried to update nonexistent gen_state",Field,Data}}})
+ end.
+update_gen_state(active,State,Data) ->
+ save_gen_state(State#gen_state{active=Data});
+update_gen_state(prefix,State,Data) ->
+ save_gen_state(State#gen_state{prefix=Data});
+update_gen_state(inc_tag_pattern,State,Data) ->
+ save_gen_state(State#gen_state{inc_tag_pattern=Data});
+update_gen_state(tag_pattern,State,Data) ->
+ save_gen_state(State#gen_state{tag_pattern=Data});
+update_gen_state(inc_type_pattern,State,Data) ->
+ save_gen_state(State#gen_state{inc_type_pattern=Data});
+update_gen_state(type_pattern,State,Data) ->
+ save_gen_state(State#gen_state{type_pattern=Data});
+update_gen_state(func_name,State,Data) ->
+ save_gen_state(State#gen_state{func_name=Data});
+update_gen_state(namelist,State,Data) ->
+% SData =
+% case Data of
+% [D] when list(D) -> D;
+% _ -> Data
+% end,
+ save_gen_state(State#gen_state{namelist=Data});
+update_gen_state(tobe_refed_funcs,State,Data) ->
+ save_gen_state(State#gen_state{tobe_refed_funcs=Data});
+update_gen_state(gen_refed_funcs,State,Data) ->
+ save_gen_state(State#gen_state{gen_refed_funcs=Data}).
+
+update_namelist(Name) ->
+ case get_gen_state_field(namelist) of
+ [Name,Rest] -> update_gen_state(namelist,Rest);
+ [Name|Rest] -> update_gen_state(namelist,Rest);
+ [{Name,List}] when list(List) -> update_gen_state(namelist,List);
+ [{Name,Atom}|Rest] when atom(Atom) -> update_gen_state(namelist,Rest);
+ Other -> Other
+ end.
+
+pop_namelist() ->
+ DeepTail = %% removes next element in order
+ fun([[{_,A}]|T],_Fun) when atom(A) -> T;
+ ([{_N,L}|T],_Fun) when list(L) -> [L|T];
+ ([[]|T],Fun) -> Fun(T,Fun);
+ ([L1|L2],Fun) when list(L1) ->
+ case lists:flatten(L1) of
+ [] -> Fun([L2],Fun);
+ _ -> [Fun(L1,Fun)|L2]
+ end;
+ ([_H|T],_Fun) -> T
+ end,
+ {Pop,NewNL} =
+ case get_gen_state_field(namelist) of
+ [] -> {[],[]};
+ L ->
+ {next_namelist_el(L),
+ DeepTail(L,DeepTail)}
+ end,
+ update_gen_state(namelist,NewNL),
+ Pop.
+
+%% next_namelist_el fetches the next type/component name in turn in
+%% the namelist, without changing the namelist.
+next_namelist_el() ->
+ case get_gen_state_field(namelist) of
+ undefined -> undefined;
+ L when list(L) -> next_namelist_el(L)
+ end.
+
+next_namelist_el([]) ->
+ [];
+next_namelist_el([L]) when list(L) ->
+ next_namelist_el(L);
+next_namelist_el([H|_]) when atom(H) ->
+ H;
+next_namelist_el([L|T]) when list(L) ->
+ case next_namelist_el(L) of
+ [] ->
+ next_namelist_el([T]);
+ R ->
+ R
+ end;
+next_namelist_el([H={_,A}|_]) when atom(A) ->
+ H.
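+
+%% Example: next_namelist_el([[a,b],c]) returns a, i.e. the first (innermost)
+%% name is fetched without consuming it; pop_namelist/0 above is the variant
+%% that also removes it from the namelist.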
+
+%% removes a bracket from the namelist
+step_in_constructed() ->
+ case get_gen_state_field(namelist) of
+ [L] when list(L) ->
+ update_gen_state(namelist,L);
+ _ -> ok
+ end.
+
+is_function_generated(Name) ->
+ case get_gen_state_field(gen_refed_funcs) of
+ L when list(L) ->
+ lists:member(Name,L);
+ _ ->
+ false
+ end.
+
+get_tobe_refed_func(Name) ->
+ case get_gen_state_field(tobe_refed_funcs) of
+ L when list(L) ->
+ case lists:keysearch(Name,1,L) of
+ {_,Element} ->
+ Element;
+ _ ->
+ undefined
+ end;
+ _ ->
+ undefined
+ end.
+
+add_tobe_refed_func(Data) ->
+ L = get_gen_state_field(tobe_refed_funcs),
+ update_gen_state(tobe_refed_funcs,[Data|L]).
+
+%% moves Name from the to-be-referenced list to the generated list.
+generated_refed_func(Name) ->
+ L = get_gen_state_field(tobe_refed_funcs),
+ NewL = lists:keydelete(Name,1,L),
+ update_gen_state(tobe_refed_funcs,NewL),
+ L2 = get_gen_state_field(gen_refed_funcs),
+ update_gen_state(gen_refed_funcs,[Name|L2]).
+
+add_generated_refed_func(Data) ->
+ L = get_gen_state_field(gen_refed_funcs),
+ update_gen_state(gen_refed_funcs,[Data|L]).
+
+
+next_refed_func() ->
+ case get_gen_state_field(tobe_refed_funcs) of
+ [] ->
+ [];
+ [H|T] ->
+ update_gen_state(tobe_refed_funcs,T),
+ H
+ end.
+
+reset_gen_state() ->
+ save_gen_state(#gen_state{}).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_check.erl
new file mode 100644
index 0000000000..9da6611dba
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_check.erl
@@ -0,0 +1,5567 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_check.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_check).
+
+%% Main Module for ASN.1 compile time functions
+
+%-compile(export_all).
+-export([check/2,storeindb/1]).
+-include("asn1_records.hrl").
+%%% The tag-number for universal types
+-define(N_BOOLEAN, 1).
+-define(N_INTEGER, 2).
+-define(N_BIT_STRING, 3).
+-define(N_OCTET_STRING, 4).
+-define(N_NULL, 5).
+-define(N_OBJECT_IDENTIFIER, 6).
+-define(N_OBJECT_DESCRIPTOR, 7).
+-define(N_EXTERNAL, 8). % constructed
+-define(N_INSTANCE_OF,8).
+-define(N_REAL, 9).
+-define(N_ENUMERATED, 10).
+-define(N_EMBEDDED_PDV, 11). % constructed
+-define(N_SEQUENCE, 16).
+-define(N_SET, 17).
+-define(N_NumericString, 18).
+-define(N_PrintableString, 19).
+-define(N_TeletexString, 20).
+-define(N_VideotexString, 21).
+-define(N_IA5String, 22).
+-define(N_UTCTime, 23).
+-define(N_GeneralizedTime, 24).
+-define(N_GraphicString, 25).
+-define(N_VisibleString, 26).
+-define(N_GeneralString, 27).
+-define(N_UniversalString, 28).
+-define(N_CHARACTER_STRING, 29). % constructed
+-define(N_BMPString, 30).
+
+-define(TAG_PRIMITIVE(Num),
+ case S#state.erule of
+ ber_bin_v2 ->
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0};
+ _ -> []
+ end).
+-define(TAG_CONSTRUCTED(Num),
+ case S#state.erule of
+ ber_bin_v2 ->
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32};
+ _ -> []
+ end).
+
+-record(newt,{type=unchanged,tag=unchanged,constraint=unchanged,inlined=no}). % used in check_type to update type and tag
+-record(newv,{type=unchanged,value=unchanged}). % used in check_value to update type and value
+
+check(S,{Types,Values,ParameterizedTypes,Classes,Objects,ObjectSets}) ->
+ %%Predicates used to filter errors
+ TupleIs = fun({T,_},T) -> true;
+ (_,_) -> false
+ end,
+ IsClass = fun(X) -> TupleIs(X,asn1_class) end,
+ IsObjSet = fun(X) -> TupleIs(X,objectsetdef) end,
+ IsPObjSet = fun(X) -> TupleIs(X,pobjectsetdef) end,
+ IsObject = fun(X) -> TupleIs(X,objectdef) end,
+ IsValueSet = fun(X) -> TupleIs(X,valueset) end,
+ Element2 = fun(X) -> element(2,X) end,
+
+ _Perror = checkp(S,ParameterizedTypes,[]), % must do this before the templates are used
+ Terror = checkt(S,Types,[]),
+
+ %% get parameterized object sets sent to checkt/3
+ %% and update Terror
+
+ {PObjSetNames1,Terror2} = filter_errors(IsPObjSet,Terror),
+
+ Verror = checkv(S,Values ++ ObjectSets,[]), %value sets may be parsed as object sets
+
+ %% get information object classes wrongly sent to checkt/3
+ %% and update Terror2
+
+ {AddClasses,Terror3} = filter_errors(IsClass,Terror2),
+
+ NewClasses = Classes++AddClasses,
+
+ Cerror = checkc(S,NewClasses,[]),
+
+ %% get object sets incorrectly sent to checkv/3
+ %% and update Verror
+
+ {ObjSetNames,Verror2} = filter_errors(IsObjSet,Verror),
+
+ %% get parameterized object sets incorrectly sent to checkv/3
+ %% and update Verror2
+
+ {PObjSetNames,Verror3} = filter_errors(IsPObjSet,Verror2),
+
+ %% get objects incorrectly sent to checkv/3
+ %% and update Verror3
+
+ {ObjectNames,Verror4} = filter_errors(IsObject,Verror3),
+
+ NewObjects = Objects++ObjectNames,
+ NewObjectSets = ObjSetNames ++ PObjSetNames ++ PObjSetNames1,
+
+ %% get value sets
+ %% and update Verror4
+
+ {ValueSetNames,Verror5} = filter_errors(IsValueSet,Verror4),
+
+ asn1ct:create_ets_table(inlined_objects,[named_table]),
+ {Oerror,ExclO,ExclOS} = checko(S,NewObjects ++
+ NewObjectSets,
+ [],[],[]),
+ InlinedObjTuples = ets:tab2list(inlined_objects),
+ InlinedObjects = lists:map(Element2,InlinedObjTuples),
+ ets:delete(inlined_objects),
+
+ Exporterror = check_exports(S,S#state.module),
+ case {Terror3,Verror5,Cerror,Oerror,Exporterror} of
+ {[],[],[],[],[]} ->
+ ContextSwitchTs = context_switch_in_spec(),
+ InstanceOf = instance_of_in_spec(),
+ NewTypes = lists:subtract(Types,AddClasses) ++ ContextSwitchTs
+ ++ InstanceOf,
+ NewValues = lists:subtract(Values,PObjSetNames++ObjectNames++
+ ValueSetNames),
+ {ok,
+ {NewTypes,NewValues,ParameterizedTypes,
+ NewClasses,NewObjects,NewObjectSets},
+ {NewTypes,NewValues,ParameterizedTypes,NewClasses,
+ lists:subtract(NewObjects,ExclO)++InlinedObjects,
+ lists:subtract(NewObjectSets,ExclOS)}};
+ _ ->{error,{asn1,lists:flatten([Terror3,Verror5,Cerror,
+ Oerror,Exporterror])}}
+ end.
+
+context_switch_in_spec() ->
+ L = [{external,'EXTERNAL'},
+ {embedded_pdv,'EMBEDDED PDV'},
+ {character_string,'CHARACTER STRING'}],
+ F = fun({T,TName},Acc) ->
+ case get(T) of
+ generate -> erase(T),
+ [TName|Acc];
+ _ -> Acc
+ end
+ end,
+ lists:foldl(F,[],L).
+
+instance_of_in_spec() ->
+ case get(instance_of) of
+ generate ->
+ erase(instance_of),
+ ['INSTANCE OF'];
+ _ ->
+ []
+ end.
+
+filter_errors(Pred,ErrorList) ->
+ Element2 = fun(X) -> element(2,X) end,
+ RemovedTupleElements = lists:filter(Pred,ErrorList),
+ RemovedNames = lists:map(Element2,RemovedTupleElements),
+ %% remove value set name tuples from Verror
+ RestErrors = lists:subtract(ErrorList,RemovedTupleElements),
+ {RemovedNames,RestErrors}.
+
+
+check_exports(S,Module = #module{}) ->
+ case Module#module.exports of
+ {exports,[]} ->
+ [];
+ {exports,all} ->
+ [];
+ {exports,ExportList} when list(ExportList) ->
+ IsNotDefined =
+ fun(X) ->
+ case catch get_referenced_type(S,X) of
+ {error,{asn1,_}} ->
+ true;
+ _ -> false
+ end
+ end,
+ case lists:filter(IsNotDefined,ExportList) of
+ [] ->
+ [];
+ NoDefExp ->
+ GetName =
+ fun(T = #'Externaltypereference'{type=N})->
+ %%{exported,undefined,entity,N}
+ NewS=S#state{type=T,tname=N},
+ error({export,"exported undefined entity",NewS})
+ end,
+ lists:map(GetName,NoDefExp)
+ end
+ end.
+
+checkt(S,[Name|T],Acc) ->
+ %%io:format("check_typedef:~p~n",[Name]),
+ Result =
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({type,{internal_error,'???'},S});
+ Type when record(Type,typedef) ->
+ NewS = S#state{type=Type,tname=Name},
+ case catch(check_type(NewS,Type,Type#typedef.typespec)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1_class,_ClassDef} ->
+ {asn1_class,Name};
+ pobjectsetdef ->
+ {pobjectsetdef,Name};
+ pvalueset ->
+ {pvalueset,Name};
+ Ts ->
+ case Type#typedef.checked of
+ true -> % already checked and updated
+ ok;
+ _ ->
+ NewTypeDef = Type#typedef{checked=true,typespec = Ts},
+ %io:format("checkt:dbput:~p, ~p~n",[S#state.mname,NewTypeDef#typedef.name]),
+ asn1_db:dbput(NewS#state.mname,Name,NewTypeDef), % update the type
+ ok
+ end
+ end
+ end,
+ case Result of
+ ok ->
+ checkt(S,T,Acc);
+ _ ->
+ checkt(S,T,[Result|Acc])
+ end;
+checkt(S,[],Acc) ->
+ case check_contextswitchingtypes(S,[]) of
+ [] ->
+ lists:reverse(Acc);
+ L ->
+ checkt(S,L,Acc)
+ end.
+
+check_contextswitchingtypes(S,Acc) ->
+ CSTList=[{external,'EXTERNAL'},
+ {embedded_pdv,'EMBEDDED PDV'},
+ {character_string,'CHARACTER STRING'}],
+ check_contextswitchingtypes(S,CSTList,Acc).
+
+check_contextswitchingtypes(S,[{T,TName}|Ts],Acc) ->
+ case get(T) of
+ unchecked ->
+ put(T,generate),
+ check_contextswitchingtypes(S,Ts,[TName|Acc]);
+ _ ->
+ check_contextswitchingtypes(S,Ts,Acc)
+ end;
+check_contextswitchingtypes(_,[],Acc) ->
+ Acc.
+
+checkv(S,[Name|T],Acc) ->
+ %%io:format("check_valuedef:~p~n",[Name]),
+ Result = case asn1_db:dbget(S#state.mname,Name) of
+ undefined -> error({value,{internal_error,'???'},S});
+ Value when record(Value,valuedef);
+ record(Value,typedef); %Value set may be parsed as object set.
+ record(Value,pvaluedef);
+ record(Value,pvaluesetdef) ->
+ NewS = S#state{value=Value},
+ case catch(check_value(NewS,Value)) of
+ {error,Reason} ->
+ error({value,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({value,{internal_error,Reason},NewS});
+ {pobjectsetdef} ->
+ {pobjectsetdef,Name};
+ {objectsetdef} ->
+ {objectsetdef,Name};
+ {objectdef} ->
+ %% this is an object, save as typedef
+ #valuedef{checked=C,pos=Pos,name=N,type=Type,
+ value=Def}=Value,
+% Currmod = S#state.mname,
+% #type{def=
+% #'Externaltypereference'{module=Mod,
+% type=CName}} = Type,
+ ClassName =
+ Type#type.def,
+% case Mod of
+% Currmod ->
+% {objectclassname,CName};
+% _ ->
+% {objectclassname,Mod,CName}
+% end,
+ NewSpec = #'Object'{classname=ClassName,
+ def=Def},
+ NewDef = #typedef{checked=C,pos=Pos,name=N,
+ typespec=NewSpec},
+ asn1_db:dbput(NewS#state.mname,Name,NewDef),
+ {objectdef,Name};
+ {valueset,VSet} ->
+ Pos = asn1ct:get_pos_of_def(Value),
+ CheckedVSDef = #typedef{checked=true,pos=Pos,
+ name=Name,typespec=VSet},
+ asn1_db:dbput(NewS#state.mname,Name,CheckedVSDef),
+ {valueset,Name};
+ V ->
+ %% update the valuedef
+ asn1_db:dbput(NewS#state.mname,Name,V),
+ ok
+ end
+ end,
+ case Result of
+ ok ->
+ checkv(S,T,Acc);
+ _ ->
+ checkv(S,T,[Result|Acc])
+ end;
+checkv(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+
+checkp(S,[Name|T],Acc) ->
+ %io:format("check_ptypedef:~p~n",[Name]),
+ Result = case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({type,{internal_error,'???'},S});
+ Type when record(Type,ptypedef) ->
+ NewS = S#state{type=Type,tname=Name},
+ case catch(check_ptype(NewS,Type,Type#ptypedef.typespec)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1_class,_ClassDef} ->
+ {asn1_class,Name};
+ Ts ->
+ NewType = Type#ptypedef{checked=true,typespec = Ts},
+ asn1_db:dbput(NewS#state.mname,Name,NewType), % update the type
+ ok
+ end
+ end,
+ case Result of
+ ok ->
+ checkp(S,T,Acc);
+ _ ->
+ checkp(S,T,[Result|Acc])
+ end;
+checkp(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+
+
+
+checkc(S,[Name|Cs],Acc) ->
+ Result =
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({class,{internal_error,'???'},S});
+ Class ->
+ ClassSpec = if
+ record(Class,classdef) ->
+ Class#classdef.typespec;
+ record(Class,typedef) ->
+ Class#typedef.typespec
+ end,
+ NewS = S#state{type=Class,tname=Name},
+ case catch(check_class(NewS,ClassSpec)) of
+ {error,Reason} ->
+ error({class,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({class,{internal_error,Reason},NewS});
+ C ->
+ %% update the classdef
+ NewClass =
+ if
+ record(Class,classdef) ->
+ Class#classdef{checked=true,typespec=C};
+ record(Class,typedef) ->
+ #classdef{checked=true,name=Name,typespec=C}
+ end,
+ asn1_db:dbput(NewS#state.mname,Name,NewClass),
+ ok
+ end
+ end,
+ case Result of
+ ok ->
+ checkc(S,Cs,Acc);
+ _ ->
+ checkc(S,Cs,[Result|Acc])
+ end;
+checkc(_S,[],Acc) ->
+%% include_default_class(S#state.mname),
+ lists:reverse(Acc).
+
+checko(S,[Name|Os],Acc,ExclO,ExclOS) ->
+ Result =
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({type,{internal_error,'???'},S});
+ Object when record(Object,typedef) ->
+ NewS = S#state{type=Object,tname=Name},
+ case catch(check_object(NewS,Object,Object#typedef.typespec)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1,Reason} ->
+ error({type,Reason,NewS});
+ O ->
+ NewObj = Object#typedef{checked=true,typespec=O},
+ asn1_db:dbput(NewS#state.mname,Name,NewObj),
+ if
+ record(O,'Object') ->
+ case O#'Object'.gen of
+ true ->
+ {ok,ExclO,ExclOS};
+ false ->
+ {ok,[Name|ExclO],ExclOS}
+ end;
+ record(O,'ObjectSet') ->
+ case O#'ObjectSet'.gen of
+ true ->
+ {ok,ExclO,ExclOS};
+ false ->
+ {ok,ExclO,[Name|ExclOS]}
+ end
+ end
+ end;
+ PObject when record(PObject,pobjectdef) ->
+ NewS = S#state{type=PObject,tname=Name},
+ case (catch check_pobject(NewS,PObject)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1,Reason} ->
+ error({type,Reason,NewS});
+ PO ->
+ NewPObj = PObject#pobjectdef{def=PO},
+ asn1_db:dbput(NewS#state.mname,Name,NewPObj),
+ {ok,[Name|ExclO],ExclOS}
+ end;
+ PObjSet when record(PObjSet,pvaluesetdef) ->
+	    %% This is a parameterized object set. It might also be a
+	    %% parameterized value set.
+ NewS = S#state{type=PObjSet,tname=Name},
+ case (catch check_pobjectset(NewS,PObjSet)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1,Reason} ->
+ error({type,Reason,NewS});
+ POS ->
+ %%NewPObjSet = PObjSet#pvaluesetdef{valueset=POS},
+ asn1_db:dbput(NewS#state.mname,Name,POS),
+ {ok,ExclO,[Name|ExclOS]}
+ end
+ end,
+ case Result of
+ {ok,NewExclO,NewExclOS} ->
+ checko(S,Os,Acc,NewExclO,NewExclOS);
+ _ ->
+ checko(S,Os,[Result|Acc],ExclO,ExclOS)
+ end;
+checko(_S,[],Acc,ExclO,ExclOS) ->
+ {lists:reverse(Acc),lists:reverse(ExclO),lists:reverse(ExclOS)}.
+
+check_class(S,CDef=#classdef{checked=Ch,name=Name,typespec=TS}) ->
+ case Ch of
+ true -> TS;
+ idle -> TS;
+ _ ->
+ NewCDef = CDef#classdef{checked=idle},
+ asn1_db:dbput(S#state.mname,Name,NewCDef),
+ CheckedTS = check_class(S,TS),
+ asn1_db:dbput(S#state.mname,Name,
+ NewCDef#classdef{checked=true,
+ typespec=CheckedTS}),
+ CheckedTS
+ end;
+check_class(S = #state{mname=M,tname=T},ClassSpec)
+ when record(ClassSpec,type) ->
+ Def = ClassSpec#type.def,
+ case Def of
+ #'Externaltypereference'{module=M,type=T} ->
+ #objectclass{fields=Def}; % in case of recursive definitions
+ Tref when record(Tref,'Externaltypereference') ->
+ {_,RefType} = get_referenced_type(S,Tref),
+% case RefType of
+% RefClass when record(RefClass,classdef) ->
+% check_class(S,RefClass#classdef.typespec)
+% end
+ case is_class(S,RefType) of
+ true ->
+ check_class(S,get_class_def(S,RefType));
+ _ ->
+ error({class,{internal_error,RefType},S})
+ end
+ end;
+% check_class(S,{objectclassname,ModuleName,ClassName}) when atom(ModuleName),atom(ClassName) ->
+% 'fix this';
+check_class(S,C) when record(C,objectclass) ->
+ NewFieldSpec = check_class_fields(S,C#objectclass.fields),
+ C#objectclass{fields=NewFieldSpec};
+%check_class(S,{objectclassname,ClassName}) ->
+check_class(S,ClassName) ->
+ {_,Def} = get_referenced_type(S,ClassName),
+ case Def of
+ ClassDef when record(ClassDef,classdef) ->
+ case ClassDef#classdef.checked of
+ true ->
+ ClassDef#classdef.typespec;
+ idle ->
+ ClassDef#classdef.typespec;
+ false ->
+ check_class(S,ClassDef#classdef.typespec)
+ end;
+ TypeDef when record(TypeDef,typedef) ->
+ %% this case may occur when a definition is a reference
+ %% to a class definition.
+ case TypeDef#typedef.typespec of
+ #type{def=Ext} when record(Ext,'Externaltypereference') ->
+ check_class(S,Ext)
+ end
+ end;
+check_class(_S,{poc,_ObjSet,_Params}) ->
+ 'fix this later'.
+
+check_class_fields(S,Fields) ->
+ check_class_fields(S,Fields,[]).
+
+check_class_fields(S,[F|Fields],Acc) ->
+ NewField =
+ case element(1,F) of
+ fixedtypevaluefield ->
+ {_,Name,Type,Unique,OSpec} = F,
+ RefType = check_type(S,#typedef{typespec=Type},Type),
+ {fixedtypevaluefield,Name,RefType,Unique,OSpec};
+ object_or_fixedtypevalue_field ->
+ {_,Name,Type,Unique,OSpec} = F,
+ Cat =
+ case asn1ct_gen:type(asn1ct_gen:get_inner(Type#type.def)) of
+ Def when record(Def,typereference);
+ record(Def,'Externaltypereference') ->
+ {_,D} = get_referenced_type(S,Def),
+ D;
+ {undefined,user} ->
+			  %% neither {primitive,bif} nor {constructed,bif}
+%% {_,D} = get_referenced_type(S,#typereference{val=Type#type.def}),
+ {_,D} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=Type#type.def}),
+ D;
+ _ ->
+ Type
+ end,
+ case Cat of
+ Class when record(Class,classdef) ->
+ {objectfield,Name,Type,Unique,OSpec};
+ _ ->
+ RefType = check_type(S,#typedef{typespec=Type},Type),
+ {fixedtypevaluefield,Name,RefType,Unique,OSpec}
+ end;
+ objectset_or_fixedtypevalueset_field ->
+ {_,Name,Type,OSpec} = F,
+%% RefType = check_type(S,#typedef{typespec=Type},Type),
+ RefType =
+ case (catch check_type(S,#typedef{typespec=Type},Type)) of
+ {asn1_class,_ClassDef} ->
+ case if_current_checked_type(S,Type) of
+ true ->
+ Type#type.def;
+ _ ->
+ check_class(S,Type)
+ end;
+ CheckedType when record(CheckedType,type) ->
+ CheckedType;
+ _ ->
+ error({class,"internal error, check_class_fields",S})
+ end,
+ if
+ record(RefType,'Externaltypereference') ->
+ {objectsetfield,Name,Type,OSpec};
+ record(RefType,classdef) ->
+ {objectsetfield,Name,Type,OSpec};
+ record(RefType,objectclass) ->
+ {objectsetfield,Name,Type,OSpec};
+ true ->
+ {fixedtypevaluesetfield,Name,RefType,OSpec}
+ end;
+ typefield ->
+ case F of
+ {TF,Name,{'DEFAULT',Type}} ->
+ {TF,Name,{'DEFAULT',check_type(S,#typedef{typespec=Type},Type)}};
+ _ -> F
+ end;
+ _ -> F
+ end,
+ check_class_fields(S,Fields,[NewField|Acc]);
+check_class_fields(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+if_current_checked_type(S,#type{def=Def}) ->
+ CurrentCheckedName = S#state.tname,
+ MergedModules = S#state.inputmodules,
+ % CurrentCheckedModule = S#state.mname,
+ case Def of
+ #'Externaltypereference'{module=CurrentCheckedName,
+ type=CurrentCheckedName} ->
+ true;
+ #'Externaltypereference'{module=ModuleName,
+ type=CurrentCheckedName} ->
+ case MergedModules of
+ undefined ->
+ false;
+ _ ->
+ lists:member(ModuleName,MergedModules)
+ end;
+ _ ->
+ false
+ end.
+
+
+
+check_pobject(_S,PObject) when record(PObject,pobjectdef) ->
+ Def = PObject#pobjectdef.def,
+ Def.
+
+
+check_pobjectset(S,PObjSet) ->
+ #pvaluesetdef{pos=Pos,name=Name,args=Args,type=Type,
+ valueset=ValueSet}=PObjSet,
+ {Mod,Def} = get_referenced_type(S,Type#type.def),
+ case Def of
+ #classdef{} ->
+ ClassName = #'Externaltypereference'{module=Mod,
+ type=Def#classdef.name},
+ {valueset,Set} = ValueSet,
+% ObjectSet = #'ObjectSet'{class={objectclassname,ClassName},
+ ObjectSet = #'ObjectSet'{class=ClassName,
+ set=Set},
+ #pobjectsetdef{pos=Pos,name=Name,args=Args,class=Type#type.def,
+ def=ObjectSet};
+ _ ->
+ PObjSet
+ end.
+
+check_object(_S,ObjDef,ObjSpec) when (ObjDef#typedef.checked == true) ->
+ ObjSpec;
+check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
+ {_,_ClassDef} = get_referenced_type(S,ClassRef),
+ NewClassRef = check_externaltypereference(S,ClassRef),
+ ClassDef =
+ case _ClassDef#classdef.checked of
+ false ->
+ #classdef{checked=true,
+ typespec=check_class(S,_ClassDef#classdef.typespec)};
+ _ ->
+ _ClassDef
+ end,
+ NewObj =
+ case ObjectDef of
+ Def when tuple(Def), (element(1,Def)==object) ->
+ NewSettingList = check_objectdefn(S,Def,ClassDef),
+ #'Object'{def=NewSettingList};
+% Def when tuple(Def), (element(1,Def)=='ObjectFromObject') ->
+% fixa;
+ {po,{object,DefObj},ArgsList} ->
+ {_,Object} = get_referenced_type(S,DefObj),%DefObj is a
+ %%#'Externalvaluereference' or a #'Externaltypereference'
+	    %% Maybe this call should be caught and, in case of an exception,
+	    %% a non-allocated parameterized object should be returned.
+ instantiate_po(S,ClassDef,Object,ArgsList);
+ #'Externalvaluereference'{} ->
+ {_,Object} = get_referenced_type(S,ObjectDef),
+ check_object(S,Object,Object#typedef.typespec);
+ _ ->
+ exit({error,{no_object,ObjectDef},S})
+ end,
+ Gen = gen_incl(S,NewObj#'Object'.def,
+ (ClassDef#classdef.typespec)#objectclass.fields),
+ NewObj#'Object'{classname=NewClassRef,gen=Gen};
+
+%%check_object(S,ObjSetDef,ObjSet=#type{def={pt,ObjSetRef,Args}}) ->
+ %% A parameterized
+
+check_object(S,
+ _ObjSetDef,
+ ObjSet=#'ObjectSet'{class=ClassRef}) ->
+ {_,ClassDef} = get_referenced_type(S,ClassRef),
+ NewClassRef = check_externaltypereference(S,ClassRef),
+ UniqueFieldName =
+ case (catch get_unique_fieldname(ClassDef)) of
+ {error,'__undefined_'} -> {unique,undefined};
+ {asn1,Msg,_} -> error({class,Msg,S});
+ Other -> Other
+ end,
+ NewObjSet=
+ case ObjSet#'ObjectSet'.set of
+ {'SingleValue',Set} when list(Set) ->
+ CheckedSet = check_object_list(S,NewClassRef,Set),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ {'SingleValue',{definedvalue,ObjName}} ->
+ {_,ObjDef} = get_referenced_type(S,#identifier{val=ObjName}),
+ #'Object'{def=CheckedObj} =
+ check_object(S,ObjDef,ObjDef#typedef.typespec),
+ NewSet = get_unique_valuelist(S,[{ObjDef#typedef.name,
+ CheckedObj}],
+ UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ {'SingleValue',#'Externalvaluereference'{value=ObjName}} ->
+ {_,ObjDef} = get_referenced_type(S,#identifier{val=ObjName}),
+ #'Object'{def=CheckedObj} =
+ check_object(S,ObjDef,ObjDef#typedef.typespec),
+ NewSet = get_unique_valuelist(S,[{ObjDef#typedef.name,
+ CheckedObj}],
+ UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ ['EXTENSIONMARK'] ->
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=['EXTENSIONMARK']};
+ Set when list(Set) ->
+ CheckedSet = check_object_list(S,NewClassRef,Set),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ {Set,Ext} when list(Set) ->
+ CheckedSet = check_object_list(S,NewClassRef,Set++Ext),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet++['EXTENSIONMARK']};
+ {{'SingleValue',Set},Ext} ->
+ CheckedSet = check_object_list(S,NewClassRef,
+ merge_sets(Set,Ext)),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet++['EXTENSIONMARK']};
+ {Type,{'EXCEPT',Exclusion}} when record(Type,type) ->
+ {_,TDef} = get_referenced_type(S,Type#type.def),
+ OS = TDef#typedef.typespec,
+ NewSet = reduce_objectset(OS#'ObjectSet'.set,Exclusion),
+ NewOS = OS#'ObjectSet'{set=NewSet},
+ check_object(S,TDef#typedef{typespec=NewOS},
+ NewOS);
+ #type{def={pt,DefinedObjSet,ParamList}} ->
+ {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
+ instantiate_pos(S,ClassDef,PObjSetDef,ParamList);
+ {ObjDef={object,definedsyntax,_ObjFields},_Ext} ->
+ CheckedSet = check_object_list(S,NewClassRef,[ObjDef]),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet++['EXTENSIONMARK']}
+ end,
+ Gen = gen_incl_set(S,NewObjSet#'ObjectSet'.set,
+ ClassDef),
+ NewObjSet#'ObjectSet'{class=NewClassRef,gen=Gen}.
+
+
+merge_sets(Set,Ext) when list(Set),list(Ext) ->
+ Set ++ Ext;
+merge_sets(Set,Ext) when list(Ext) ->
+ [Set|Ext];
+merge_sets(Set,{'SingleValue',Ext}) when list(Set) ->
+ Set ++ [Ext];
+merge_sets(Set,{'SingleValue',Ext}) ->
+ [Set] ++ [Ext].
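+%% For example (a minimal sketch with hypothetical object references):
+%%   merge_sets([ObjA],[ObjB])               -> [ObjA,ObjB]
+%%   merge_sets([ObjA],{'SingleValue',ObjB}) -> [ObjA,ObjB]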
+
+reduce_objectset(ObjectSet,Exclusion) ->
+ case Exclusion of
+ {'SingleValue',#'Externalvaluereference'{value=Name}} ->
+ case lists:keysearch(Name,1,ObjectSet) of
+ {value,El} ->
+ lists:subtract(ObjectSet,[El]);
+ _ ->
+ ObjectSet
+ end
+ end.
+
+%% Checks a list of objects or object sets and returns a list of selected
+%% information for the code generation.
+check_object_list(S,ClassRef,ObjectList) ->
+ check_object_list(S,ClassRef,ObjectList,[]).
+
+check_object_list(S,ClassRef,[ObjOrSet|Objs],Acc) ->
+ case ObjOrSet of
+ ObjDef when tuple(ObjDef),(element(1,ObjDef)==object) ->
+ Def =
+ check_object(S,#typedef{typespec=ObjDef},
+% #'Object'{classname={objectclassname,ClassRef},
+ #'Object'{classname=ClassRef,
+ def=ObjDef}),
+ check_object_list(S,ClassRef,Objs,[{no_name,Def#'Object'.def}|Acc]);
+ {'SingleValue',{definedvalue,ObjName}} ->
+ {_,ObjectDef} = get_referenced_type(S,#identifier{val=ObjName}),
+ #'Object'{def=Def} = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
+ check_object_list(S,ClassRef,Objs,[{ObjectDef#typedef.name,Def}|Acc]);
+ {'SingleValue',Ref = #'Externalvaluereference'{}} ->
+ {_,ObjectDef} = get_referenced_type(S,Ref),
+ #'Object'{def=Def} = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
+ check_object_list(S,ClassRef,Objs,[{ObjectDef#typedef.name,Def}|Acc]);
+ ObjRef when record(ObjRef,'Externalvaluereference') ->
+ {_,ObjectDef} = get_referenced_type(S,ObjRef),
+ #'Object'{def=Def} = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
+ check_object_list(S,ClassRef,Objs,
+%% [{ObjRef#'Externalvaluereference'.value,Def}|Acc]);
+ [{ObjectDef#typedef.name,Def}|Acc]);
+ {'ValueFromObject',{_,Object},FieldName} ->
+ {_,Def} = get_referenced_type(S,Object),
+%% TypeOrVal = get_fieldname_element(S,Def,FieldName);%% this must result in an object set
+ TypeDef = get_fieldname_element(S,Def,FieldName),
+ (TypeDef#typedef.typespec)#'ObjectSet'.set;
+ ObjSet when record(ObjSet,type) ->
+ ObjSetDef =
+ case ObjSet#type.def of
+ Ref when record(Ref,typereference);
+ record(Ref,'Externaltypereference') ->
+ {_,D} = get_referenced_type(S,ObjSet#type.def),
+ D;
+ Other ->
+ throw({asn1_error,{'unknown objecset',Other,S}})
+ end,
+ #'ObjectSet'{set=ObjectsInSet} =
+ check_object(S,ObjSetDef,ObjSetDef#typedef.typespec),
+ AccList = transform_set_to_object_list(ObjectsInSet,[]),
+ check_object_list(S,ClassRef,Objs,AccList++Acc);
+ union ->
+ check_object_list(S,ClassRef,Objs,Acc);
+ Other ->
+ exit({error,{'unknown object',Other},S})
+ end;
+%% Finally, reverse the accumulated list; if there are any extension
+%% marks in the object set, put one indicator of that at the end of the
+%% list.
+check_object_list(_,_,[],Acc) ->
+ lists:reverse(Acc).
+%% case lists:member('EXTENSIONMARK',RevAcc) of
+%% true ->
+%% ExclRevAcc = lists:filter(fun(X)->X /= 'EXTENSIONMARK' end,
+%% RevAcc),
+%% ExclRevAcc ++ ['EXTENSIONMARK'];
+%% false ->
+%% RevAcc
+%% end.
+
+
+%% get_fieldname_element/3
+%% gets the type/value/object/... of the referenced element in FieldName
+%% FieldName is a list and may have more than one element.
+%% Each element in FieldName can be either {typefieldreference,AnyFieldName}
+%% or {valuefieldreference,AnyFieldName}
+%% Def is the def of the first object referenced by FieldName
+get_fieldname_element(S,Def,[{_RefType,FieldName}]) when record(Def,typedef) ->
+ {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
+ case lists:keysearch(FieldName,1,ObjComps) of
+ {value,{_,TDef}} when record(TDef,typedef) ->
+ %% ORec = TDef#typedef.typespec, %% XXX This must be made general
+% case TDef#typedef.typespec of
+% ObjSetRec when record(ObjSetRec,'ObjectSet') ->
+% ObjSet = ObjSetRec#'ObjectSet'.set;
+% ObjRec when record(ObjRec,'Object') ->
+% %% now get the field in ObjRec that RestFName points out
+% %ObjRec
+% TDef
+% end;
+ TDef;
+ {value,{_,VDef}} when record(VDef,valuedef) ->
+ check_value(S,VDef);
+ _ ->
+ throw({assigned_object_error,"not_assigned_object",S})
+ end;
+get_fieldname_element(_S,Def,[{_RefType,_FieldName}|_RestFName])
+ when record(Def,typedef) ->
+ ok.
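+%% For example, a FieldName such as [{valuefieldreference,id}] (a minimal
+%% sketch; 'id' is a hypothetical field name) selects the component named
+%% 'id' from the object's component list via lists:keysearch/3 above.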
+
+transform_set_to_object_list([{Name,_UVal,Fields}|Objs],Acc) ->
+ transform_set_to_object_list(Objs,[{Name,{object,generatesyntax,Fields}}|Acc]);
+transform_set_to_object_list(['EXTENSIONMARK'|Objs],Acc) ->
+%% transform_set_to_object_list(Objs,['EXTENSIONMARK'|Acc]);
+ transform_set_to_object_list(Objs,Acc);
+transform_set_to_object_list([],Acc) ->
+ Acc.
+
+get_unique_valuelist(_S,ObjSet,{unique,undefined}) -> % no unique field in object
+ lists:map(fun({N,{_,_,F}})->{N,F};
+ (V={_,_,_}) ->V end, ObjSet);
+get_unique_valuelist(S,ObjSet,UFN) ->
+ get_unique_vlist(S,ObjSet,UFN,[]).
+
+get_unique_vlist(S,[],_,Acc) ->
+ case catch check_uniqueness(Acc) of
+ {asn1_error,_} ->
+% exit({error,Reason,S});
+ error({'ObjectSet',"not unique objects in object set",S});
+ true ->
+ lists:reverse(Acc)
+ end;
+get_unique_vlist(S,[{ObjName,Obj}|Rest],UniqueFieldName,Acc) ->
+ {_,_,Fields} = Obj,
+ VDef = get_unique_value(S,Fields,UniqueFieldName),
+ get_unique_vlist(S,Rest,UniqueFieldName,
+ [{ObjName,VDef#valuedef.value,Fields}|Acc]);
+get_unique_vlist(S,[V={_,_,_}|Rest],UniqueFieldName,Acc) ->
+ get_unique_vlist(S,Rest,UniqueFieldName,[V|Acc]).
+
+get_unique_value(S,Fields,UniqueFieldName) ->
+ Module = S#state.mname,
+ case lists:keysearch(UniqueFieldName,1,Fields) of
+ {value,Field} ->
+ case element(2,Field) of
+ VDef when record(VDef,valuedef) ->
+ VDef;
+ {definedvalue,ValName} ->
+ ValueDef = asn1_db:dbget(Module,ValName),
+ case ValueDef of
+ VDef when record(VDef,valuedef) ->
+ ValueDef;
+ undefined ->
+ #valuedef{value=ValName}
+ end;
+ {'ValueFromObject',Object,Name} ->
+ case Object of
+ {object,Ext} when record(Ext,'Externaltypereference') ->
+ OtherModule = Ext#'Externaltypereference'.module,
+ ExtObjName = Ext#'Externaltypereference'.type,
+ ObjDef = asn1_db:dbget(OtherModule,ExtObjName),
+ ObjSpec = ObjDef#typedef.typespec,
+ get_unique_value(OtherModule,element(3,ObjSpec),Name);
+ {object,{_,_,ObjName}} ->
+ ObjDef = asn1_db:dbget(Module,ObjName),
+ ObjSpec = ObjDef#typedef.typespec,
+ get_unique_value(Module,element(3,ObjSpec),Name);
+ {po,Object,_Params} ->
+ exit({error,{'parameterized object not implemented yet',
+ Object},S})
+ end;
+ Value when atom(Value);number(Value) ->
+ #valuedef{value=Value};
+ {'CHOICE',{_,Value}} when atom(Value);number(Value) ->
+ #valuedef{value=Value}
+ end;
+ false ->
+ exit({error,{'no unique value',Fields,UniqueFieldName},S})
+%% io:format("WARNING: no unique value in object"),
+%% exit(uniqueFieldName)
+ end.
+
+check_uniqueness(NameValueList) ->
+ check_uniqueness1(lists:keysort(2,NameValueList)).
+
+check_uniqueness1([]) ->
+ true;
+check_uniqueness1([_]) ->
+ true;
+check_uniqueness1([{_,N,_},{_,N,_}|_Rest]) ->
+ throw({asn1_error,{'objects in set must have unique values in UNIQUE fields',N}});
+check_uniqueness1([_|Rest]) ->
+ check_uniqueness1(Rest).
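+%% For example (a minimal sketch with hypothetical object names):
+%%   check_uniqueness([{objA,1,[]},{objB,2,[]}]) -> true
+%%   check_uniqueness([{objA,1,[]},{objB,1,[]}]) throws
+%%     {asn1_error,{'objects in set must have unique values in UNIQUE fields',1}}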
+
+%% instantiate_po/4
+%% ClassDef is the class of Object,
+%% Object is the parameterized object that is referenced,
+%% ArgsList is the list of actual parameters.
+%% Returns an #'Object' record.
+instantiate_po(S,_ClassDef,Object,ArgsList) when record(Object,pobjectdef) ->
+ FormalParams = get_pt_args(Object),
+ MatchedArgs = match_args(FormalParams,ArgsList,[]),
+ NewS = S#state{type=Object,parameters=MatchedArgs},
+ check_object(NewS,Object,#'Object'{classname=Object#pobjectdef.class,
+ def=Object#pobjectdef.def}).
+
+%% instantiate_pos/4
+%% ClassDef is the class of ObjectSetDef,
+%% ObjectSetDef is the Parameterized object set, which is referenced
+%% on the right side of the assignment,
+%% ArgsList is the list of actual parameters, i.e. real objects
+instantiate_pos(S,ClassDef,ObjectSetDef,ArgsList) ->
+ ClassName = ClassDef#classdef.name,
+ FormalParams = get_pt_args(ObjectSetDef),
+ Set = case get_pt_spec(ObjectSetDef) of
+ {valueset,_Set} -> _Set;
+ _Set -> _Set
+ end,
+ MatchedArgs = match_args(FormalParams,ArgsList,[]),
+ NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs},
+ check_object(NewS,ObjectSetDef,
+ #'ObjectSet'{class=name2Extref(S#state.mname,ClassName),
+ set=Set}).
+
+
+%% gen_incl -> boolean()
+%% If the object with Fields has any of the corresponding class's typefields,
+%% then the return value is true; otherwise it is false.
+%% If an object lacks a typefield but the class has a typefield that
+%% is OPTIONAL, then we want gen to be true.
+gen_incl(S,{_,_,Fields},CFields)->
+ gen_incl1(S,Fields,CFields).
+
+gen_incl1(_,_,[]) ->
+ false;
+gen_incl1(S,Fields,[C|CFields]) ->
+ case element(1,C) of
+ typefield ->
+% case lists:keymember(element(2,C),1,Fields) of
+% true ->
+% true;
+% false ->
+% gen_incl1(S,Fields,CFields)
+% end;
+	    true; %% should check that the field is OPTIONAL or DEFAULT if
+ %% the object lacks this field
+ objectfield ->
+ case lists:keysearch(element(2,C),1,Fields) of
+ {value,Field} ->
+ Type = element(3,C),
+ {_,ClassDef} = get_referenced_type(S,Type#type.def),
+% {_,ClassFields,_} = ClassDef#classdef.typespec,
+ #objectclass{fields=ClassFields} =
+ ClassDef#classdef.typespec,
+ ObjTDef = element(2,Field),
+ case gen_incl(S,(ObjTDef#typedef.typespec)#'Object'.def,
+ ClassFields) of
+ true ->
+ true;
+ _ ->
+ gen_incl1(S,Fields,CFields)
+ end;
+ _ ->
+ gen_incl1(S,Fields,CFields)
+ end;
+ _ ->
+ gen_incl1(S,Fields,CFields)
+ end.
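+%% Note that gen_incl1/3 returns true as soon as it encounters a class field
+%% whose first element is typefield, regardless of whether the object itself
+%% sets that field; objectfield entries are checked recursively against the
+%% referenced object's class.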
+
+%% First, if there is no unique field in the class, return false (don't generate code).
+gen_incl_set(S,Fields,ClassDef) ->
+ case catch get_unique_fieldname(ClassDef) of
+ Tuple when tuple(Tuple) ->
+ false;
+ _ ->
+ gen_incl_set1(S,Fields,
+ (ClassDef#classdef.typespec)#objectclass.fields)
+ end.
+
+%% if any of the existing or potentially existing objects has a typefield
+%% then return true.
+gen_incl_set1(_,[],_CFields)->
+ false;
+gen_incl_set1(_,['EXTENSIONMARK'],_) ->
+ true;
+%% Fields are the fields of an object in the object set.
+%% CFields are the fields of the class of the object set.
+gen_incl_set1(S,[Object|Rest],CFields)->
+ Fields = element(size(Object),Object),
+ case gen_incl1(S,Fields,CFields) of
+ true ->
+ true;
+ false ->
+ gen_incl_set1(S,Rest,CFields)
+ end.
+
+check_objectdefn(S,Def,CDef) when record(CDef,classdef) ->
+ WithSyntax = (CDef#classdef.typespec)#objectclass.syntax,
+ ClassFields = (CDef#classdef.typespec)#objectclass.fields,
+ case Def of
+ {object,defaultsyntax,Fields} ->
+ check_defaultfields(S,Fields,ClassFields);
+ {object,definedsyntax,Fields} ->
+ {_,WSSpec} = WithSyntax,
+ NewFields =
+ case catch( convert_definedsyntax(S,Fields,WSSpec,
+ ClassFields,[])) of
+ {asn1,{_ErrorType,ObjToken,ClassToken}} ->
+ throw({asn1,{'match error in object',ObjToken,
+ 'found in object',ClassToken,'found in class'}});
+ Err={asn1,_} -> throw(Err);
+ Err={'EXIT',_} -> throw(Err);
+ DefaultFields when list(DefaultFields) ->
+ DefaultFields
+ end,
+ {object,defaultsyntax,NewFields};
+ {object,_ObjectId} -> % This is a DefinedObject
+ fixa;
+ Other ->
+ exit({error,{objectdefn,Other}})
+ end.
+
+check_defaultfields(S,Fields,ClassFields) ->
+ check_defaultfields(S,Fields,ClassFields,[]).
+
+check_defaultfields(_S,[],_ClassFields,Acc) ->
+ {object,defaultsyntax,lists:reverse(Acc)};
+check_defaultfields(S,[{FName,Spec}|Fields],ClassFields,Acc) ->
+ case lists:keysearch(FName,2,ClassFields) of
+ {value,CField} ->
+ NewField = convert_to_defaultfield(S,FName,Spec,CField),
+ check_defaultfields(S,Fields,ClassFields,[NewField|Acc]);
+ _ ->
+ throw({error,{asn1,{'unvalid field in object',FName}}})
+ end.
+%% {object,defaultsyntax,Fields}.
+
+convert_definedsyntax(_S,[],[],_ClassFields,Acc) ->
+ lists:reverse(Acc);
+convert_definedsyntax(S,Fields,WithSyntax,ClassFields,Acc) ->
+ case match_field(S,Fields,WithSyntax,ClassFields) of
+ {MatchedField,RestFields,RestWS} ->
+ if
+ list(MatchedField) ->
+ convert_definedsyntax(S,RestFields,RestWS,ClassFields,
+ lists:append(MatchedField,Acc));
+ true ->
+ convert_definedsyntax(S,RestFields,RestWS,ClassFields,
+ [MatchedField|Acc])
+ end
+%% throw({error,{asn1,{'unvalid syntax in object',WorS}}})
+ end.
+
+match_field(S,Fields,WithSyntax,ClassFields) ->
+ match_field(S,Fields,WithSyntax,ClassFields,[]).
+
+match_field(S,Fields,[W|Ws],ClassFields,Acc) when list(W) ->
+ case catch(match_optional_field(S,Fields,W,ClassFields,[])) of
+ {'EXIT',_} ->
+ match_field(Fields,Ws,ClassFields,Acc); %% add S
+%% {[Result],RestFields} ->
+%% {Result,RestFields,Ws};
+ {Result,RestFields} when list(Result) ->
+ {Result,RestFields,Ws};
+ _ ->
+ match_field(S,Fields,Ws,ClassFields,Acc)
+ end;
+match_field(S,Fields,WithSyntax,ClassFields,_Acc) ->
+ match_mandatory_field(S,Fields,WithSyntax,ClassFields,[]).
+
+match_optional_field(_S,RestFields,[],_,Ret) ->
+ {Ret,RestFields};
+%% An additional optional field within an optional field
+match_optional_field(S,Fields,[W|Ws],ClassFields,Ret) when list(W) ->
+ case catch match_optional_field(S,Fields,W,ClassFields,[]) of
+ {'EXIT',_} ->
+ {Ret,Fields};
+ {asn1,{optional_matcherror,_,_}} ->
+ {Ret,Fields};
+ {OptionalField,RestFields} ->
+ match_optional_field(S,RestFields,Ws,ClassFields,
+ lists:append(OptionalField,Ret))
+ end;
+%% identify and skip word
+%match_optional_field(S,[#'Externaltypereference'{type=WorS}|Rest],
+match_optional_field(S,[{_,_,WorS}|Rest],
+ [WorS|Ws],ClassFields,Ret) ->
+ match_optional_field(S,Rest,Ws,ClassFields,Ret);
+match_optional_field(S,[],_,ClassFields,Ret) ->
+ match_optional_field(S,[],[],ClassFields,Ret);
+%% identify and skip comma
+match_optional_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
+ match_optional_field(S,Rest,Ws,ClassFields,Ret);
+%% identify and save field data
+match_optional_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Ret) ->
+ WorS =
+ case Setting of
+ Type when record(Type,type) -> Type;
+%% #'Externalvaluereference'{value=WordOrSetting} -> WordOrSetting;
+ {'ValueFromObject',_,_} -> Setting;
+ {object,_,_} -> Setting;
+ {_,_,WordOrSetting} -> WordOrSetting;
+%% Atom when atom(Atom) -> Atom
+ Other -> Other
+ end,
+ case lists:keysearch(W,2,ClassFields) of
+ false ->
+ throw({asn1,{optional_matcherror,WorS,W}});
+ {value,CField} ->
+ NewField = convert_to_defaultfield(S,W,WorS,CField),
+ match_optional_field(S,Rest,Ws,ClassFields,[NewField|Ret])
+ end;
+match_optional_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Ret) ->
+ throw({asn1,{optional_matcherror,WorS,W}}).
+
+match_mandatory_field(_S,[],[],_,[Acc]) ->
+ {Acc,[],[]};
+match_mandatory_field(_S,[],[],_,Acc) ->
+ {Acc,[],[]};
+match_mandatory_field(S,[],[H|T],CF,Acc) when list(H) ->
+ match_mandatory_field(S,[],T,CF,Acc);
+match_mandatory_field(_S,[],WithSyntax,_,_Acc) ->
+ throw({asn1,{mandatory_matcherror,[],WithSyntax}});
+%match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,[Acc]) when list(W) ->
+match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,Acc) when list(W), length(Acc) >= 1 ->
+ {Acc,Fields,WithSyntax};
+%% identify and skip word
+match_mandatory_field(S,[{_,_,WorS}|Rest],
+ [WorS|Ws],ClassFields,Acc) ->
+ match_mandatory_field(S,Rest,Ws,ClassFields,Acc);
+%% identify and skip comma
+match_mandatory_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
+ match_mandatory_field(S,Rest,Ws,ClassFields,Ret);
+%% identify and save field data
+match_mandatory_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Acc) ->
+ WorS =
+ case Setting of
+%% Atom when atom(Atom) -> Atom;
+%% #'Externalvaluereference'{value=WordOrSetting} -> WordOrSetting;
+ {object,_,_} -> Setting;
+ {_,_,WordOrSetting} -> WordOrSetting;
+ Type when record(Type,type) -> Type;
+ Other -> Other
+ end,
+ case lists:keysearch(W,2,ClassFields) of
+ false ->
+ throw({asn1,{mandatory_matcherror,WorS,W}});
+ {value,CField} ->
+ NewField = convert_to_defaultfield(S,W,WorS,CField),
+ match_mandatory_field(S,Rest,Ws,ClassFields,[NewField|Acc])
+ end;
+
+match_mandatory_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Acc) ->
+ throw({asn1,{mandatory_matcherror,WorS,W}}).
+
+%% Converts a field of an object from defined syntax to default syntax
+convert_to_defaultfield(S,ObjFieldName,ObjFieldSetting,CField)->
+ CurrMod = S#state.mname,
+ case element(1,CField) of
+ typefield ->
+ TypeDef=
+ case ObjFieldSetting of
+ TypeRec when record(TypeRec,type) -> TypeRec#type.def;
+ TDef when record(TDef,typedef) ->
+ TDef#typedef{typespec=check_type(S,TDef,
+ TDef#typedef.typespec)};
+ _ -> ObjFieldSetting
+ end,
+ Type =
+ if
+ record(TypeDef,typedef) -> TypeDef;
+ true ->
+ case asn1ct_gen:type(asn1ct_gen:get_inner(TypeDef)) of
+ ERef = #'Externaltypereference'{module=CurrMod} ->
+ {_,T} = get_referenced_type(S,ERef),
+ T#typedef{checked=true,
+ typespec=check_type(S,T,
+ T#typedef.typespec)};
+ ERef = #'Externaltypereference'{module=ExtMod} ->
+ {_,T} = get_referenced_type(S,ERef),
+ #typedef{name=Name} = T,
+ check_type(S,T,T#typedef.typespec),
+ #typedef{checked=true,
+ name={ExtMod,Name},
+ typespec=ERef};
+ Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
+ T = check_type(S,#typedef{typespec=ObjFieldSetting},
+ ObjFieldSetting),
+ #typedef{checked=true,name=Bif,typespec=T};
+ _ ->
+ {Mod,T} =
+ %% get_referenced_type(S,#typereference{val=ObjFieldSetting}),
+ get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
+ case Mod of
+ CurrMod ->
+ T;
+ ExtMod ->
+ #typedef{name=Name} = T,
+ T#typedef{name={ExtMod,Name}}
+ end
+ end
+ end,
+ {ObjFieldName,Type};
+ fixedtypevaluefield ->
+ case ObjFieldName of
+ Val when atom(Val) ->
+		    %% ObjFieldSetting can be a value, an objectidentifiervalue,
+		    %% an element in an enumeration or namednumberlist, etc.
+ ValRef =
+ case ObjFieldSetting of
+ #'Externalvaluereference'{} -> ObjFieldSetting;
+ {'ValueFromObject',{_,ObjRef},FieldName} ->
+ {_,Object} = get_referenced_type(S,ObjRef),
+ ChObject = check_object(S,Object,
+ Object#typedef.typespec),
+ get_fieldname_element(S,Object#typedef{typespec=ChObject},
+ FieldName);
+ #valuedef{} ->
+ ObjFieldSetting;
+ _ ->
+ #identifier{val=ObjFieldSetting}
+ end,
+ case ValRef of
+ #valuedef{} ->
+ {ObjFieldName,check_value(S,ValRef)};
+ _ ->
+ ValDef =
+ case catch get_referenced_type(S,ValRef) of
+ {error,_} ->
+ check_value(S,#valuedef{name=Val,
+ type=element(3,CField),
+ value=ObjFieldSetting});
+ {_,VDef} when record(VDef,valuedef) ->
+ check_value(S,VDef);%% XXX
+ {_,VDef} ->
+ check_value(S,#valuedef{name=Val,
+ type=element(3,CField),
+ value=VDef})
+ end,
+ {ObjFieldName,ValDef}
+ end;
+ Val ->
+ {ObjFieldName,Val}
+ end;
+ fixedtypevaluesetfield ->
+ {ObjFieldName,ObjFieldSetting};
+ objectfield ->
+ ObjectSpec =
+ case ObjFieldSetting of
+ Ref when record(Ref,typereference);record(Ref,identifier);
+ record(Ref,'Externaltypereference');
+ record(Ref,'Externalvaluereference') ->
+ {_,R} = get_referenced_type(S,ObjFieldSetting),
+ R;
+ {'ValueFromObject',{_,ObjRef},FieldName} ->
+ %% This is an ObjectFromObject
+ {_,Object} = get_referenced_type(S,ObjRef),
+ ChObject = check_object(S,Object,
+ Object#typedef.typespec),
+ _ObjFromObj=
+ get_fieldname_element(S,Object#typedef{
+ typespec=ChObject},
+ FieldName);
+ %%ClassName = ObjFromObj#'Object'.classname,
+ %%#typedef{name=,
+ %% typespec=
+ %% ObjFromObj#'Object'{classname=
+ %% {objectclassname,ClassName}}};
+ {object,_,_} ->
+			%% An object defined inline in another object
+ #type{def=Ref} = element(3,CField),
+% CRef = case Ref of
+% #'Externaltypereference'{module=CurrMod,
+% type=CName} ->
+% CName;
+% #'Externaltypereference'{module=ExtMod,
+% type=CName} ->
+% {ExtMod,CName}
+% end,
+ InlinedObjName=
+ list_to_atom(lists:concat([S#state.tname]++
+ ['_',ObjFieldName])),
+% ObjSpec = #'Object'{classname={objectclassname,CRef},
+ ObjSpec = #'Object'{classname=Ref,
+ def=ObjFieldSetting},
+ CheckedObj=
+ check_object(S,#typedef{typespec=ObjSpec},ObjSpec),
+ InlObj = #typedef{checked=true,name=InlinedObjName,
+ typespec=CheckedObj},
+ asn1ct_gen:insert_once(inlined_objects,{InlinedObjName,
+ InlinedObjName}),
+ asn1_db:dbput(S#state.mname,InlinedObjName,InlObj),
+ InlObj;
+ #type{def=Eref} when record(Eref,'Externaltypereference') ->
+ {_,R} = get_referenced_type(S,Eref),
+ R;
+ _ ->
+%% {_,R} = get_referenced_type(S,#typereference{val=ObjFieldSetting}),
+ {_,R} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
+ R
+ end,
+ {ObjFieldName,
+ ObjectSpec#typedef{checked=true,
+ typespec=check_object(S,ObjectSpec,
+ ObjectSpec#typedef.typespec)}};
+ variabletypevaluefield ->
+ {ObjFieldName,ObjFieldSetting};
+ variabletypevaluesetfield ->
+ {ObjFieldName,ObjFieldSetting};
+ objectsetfield ->
+ {_,ObjSetSpec} =
+ case ObjFieldSetting of
+ Ref when record(Ref,'Externaltypereference');
+ record(Ref,'Externalvaluereference') ->
+ get_referenced_type(S,ObjFieldSetting);
+ ObjectList when list(ObjectList) ->
+			%% an object set defined in the object, though maybe
+ %% parsed as a SequenceOfValue
+ %% The ObjectList may be a list of references to
+ %% objects, a ValueFromObject
+ {_,_,Type,_} = CField,
+ ClassDef = Type#type.def,
+ case ClassDef#'Externaltypereference'.module of
+ CurrMod ->
+ ClassDef#'Externaltypereference'.type;
+ ExtMod ->
+ {ExtMod,
+ ClassDef#'Externaltypereference'.type}
+ end,
+ {no_name,
+ #typedef{typespec=
+ #'ObjectSet'{class=
+% {objectclassname,ClassRef},
+ ClassDef,
+ set=ObjectList}}};
+ ObjectSet={'SingleValue',_} ->
+ %% a Union of defined objects
+ {_,_,Type,_} = CField,
+ ClassDef = Type#type.def,
+% ClassRef =
+% case ClassDef#'Externaltypereference'.module of
+% CurrMod ->
+% ClassDef#'Externaltypereference'.type;
+% ExtMod ->
+% {ExtMod,
+% ClassDef#'Externaltypereference'.type}
+% end,
+ {no_name,
+% #typedef{typespec=#'ObjectSet'{class={objectclassname,ClassRef},
+ #typedef{typespec=#'ObjectSet'{class=ClassDef,
+ set=ObjectSet}}};
+ {object,_,[#type{def={'TypeFromObject',
+ {object,RefedObj},
+ FieldName}}]} ->
+ %% This case occurs when an ObjectSetFromObjects
+ %% production is used
+ {M,Def} = get_referenced_type(S,RefedObj),
+ {M,get_fieldname_element(S,Def,FieldName)};
+ #type{def=Eref} when
+ record(Eref,'Externaltypereference') ->
+ get_referenced_type(S,Eref);
+ _ ->
+%% get_referenced_type(S,#typereference{val=ObjFieldSetting})
+ get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting})
+ end,
+ {ObjFieldName,
+ ObjSetSpec#typedef{checked=true,
+ typespec=check_object(S,ObjSetSpec,
+ ObjSetSpec#typedef.typespec)}}
+ end.
+
+check_value(OldS,V) when record(V,pvaluesetdef) ->
+ #pvaluesetdef{checked=Checked,type=Type} = V,
+ case Checked of
+ true -> V;
+ {error,_} -> V;
+ false ->
+ case get_referenced_type(OldS,Type#type.def) of
+ {_,Class} when record(Class,classdef) ->
+ throw({pobjectsetdef});
+ _ -> continue
+ end
+ end;
+check_value(_OldS,V) when record(V,pvaluedef) ->
+ %% Fix this case later
+ V;
+check_value(OldS,V) when record(V,typedef) ->
+    %% This is the case when a value set has been parsed as an object set.
+ %% It may be a value set
+ #typedef{typespec=TS} = V,
+ case TS of
+ #'ObjectSet'{class=ClassRef} ->
+ {_,TSDef} = get_referenced_type(OldS,ClassRef),
+ %%IsObjectSet(TSDef);
+ case TSDef of
+ #classdef{} -> throw({objectsetdef});
+ #typedef{typespec=#type{def=Eref}} when
+ record(Eref,'Externaltypereference') ->
+		    %% This is the case if the class reference is a defined
+		    %% reference to a class.
+ check_value(OldS,V#typedef{typespec=TS#'ObjectSet'{class=Eref}});
+ #typedef{} ->
+ % an ordinary value set with a type in #typedef.typespec
+ ValueSet = TS#'ObjectSet'.set,
+ Type=check_type(OldS,TSDef,TSDef#typedef.typespec),
+ Value = check_value(OldS,#valuedef{type=Type,
+ value=ValueSet}),
+ {valueset,Type#type{constraint=Value#valuedef.value}}
+ end;
+ _ ->
+ throw({objectsetdef})
+ end;
+check_value(S,#valuedef{pos=Pos,name=Name,type=Type,
+ value={valueset,Constr}}) ->
+ NewType = Type#type{constraint=[Constr]},
+ {valueset,
+ check_type(S,#typedef{pos=Pos,name=Name,typespec=NewType},NewType)};
+check_value(OldS=#state{recordtopname=TopName},V) when record(V,valuedef) ->
+ #valuedef{name=Name,checked=Checked,type=Vtype,value=Value} = V,
+ case Checked of
+ true ->
+ V;
+ {error,_} ->
+ V;
+ false ->
+ Def = Vtype#type.def,
+ Constr = Vtype#type.constraint,
+ S = OldS#state{type=Vtype,tname=Def,value=V,vname=Name},
+ NewDef =
+ case Def of
+ Ext when record(Ext,'Externaltypereference') ->
+ RecName = Ext#'Externaltypereference'.type,
+ {_,Type} = get_referenced_type(S,Ext),
+	    %% If V isn't a value but an object, Type is a #classdef{}
+ case Type of
+ #classdef{} ->
+ throw({objectdef});
+ #typedef{} ->
+ case is_contextswitchtype(Type) of
+ true ->
+ #valuedef{value=CheckedVal}=
+ check_value(S,V#valuedef{type=Type#typedef.typespec}),
+ #newv{value=CheckedVal};
+ _ ->
+ #valuedef{value=CheckedVal}=
+ check_value(S#state{recordtopname=[RecName|TopName]},
+ V#valuedef{type=Type#typedef.typespec}),
+ #newv{value=CheckedVal}
+ end
+ end;
+ 'ANY' ->
+ throw({error,{asn1,{'cant check value of type',Def}}});
+ 'INTEGER' ->
+ validate_integer(S,Value,[],Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ {'INTEGER',NamedNumberList} ->
+ validate_integer(S,Value,NamedNumberList,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ {'BIT STRING',NamedNumberList} ->
+ validate_bitstring(S,Value,NamedNumberList,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'NULL' ->
+ validate_null(S,Value,Constr),
+ #newv{};
+ 'OBJECT IDENTIFIER' ->
+ validate_objectidentifier(S,Value,Constr),
+ #newv{value = normalize_value(S,Vtype,Value,[])};
+ 'ObjectDescriptor' ->
+ validate_objectdescriptor(S,Value,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ {'ENUMERATED',NamedNumberList} ->
+ validate_enumerated(S,Value,NamedNumberList,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'BOOLEAN'->
+ validate_boolean(S,Value,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'OCTET STRING' ->
+ validate_octetstring(S,Value,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'NumericString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'TeletexString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'VideotexString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'UTCTime' ->
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+% exit({'cant check value of type' ,Def});
+ 'GeneralizedTime' ->
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+% exit({'cant check value of type' ,Def});
+ 'GraphicString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'VisibleString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'GeneralString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'PrintableString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'IA5String' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'BMPString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+%% 'UniversalString' -> %added 6/12 -00
+%% #newv{value=validate_restrictedstring(S,Value,Def,Constr)};
+ Seq when record(Seq,'SEQUENCE') ->
+ SeqVal = validate_sequence(S,Value,
+ Seq#'SEQUENCE'.components,
+ Constr),
+ #newv{value=normalize_value(S,Vtype,SeqVal,TopName)};
+ {'SEQUENCE OF',Components} ->
+ validate_sequenceof(S,Value,Components,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ {'CHOICE',Components} ->
+ validate_choice(S,Value,Components,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ Set when record(Set,'SET') ->
+ validate_set(S,Value,Set#'SET'.components,
+ Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ {'SET OF',Components} ->
+ validate_setof(S,Value,Components,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ Other ->
+ exit({'cant check value of type' ,Other})
+ end,
+ case NewDef#newv.value of
+ unchanged ->
+ V#valuedef{checked=true,value=Value};
+ ok ->
+ V#valuedef{checked=true,value=Value};
+ {error,Reason} ->
+ V#valuedef{checked={error,Reason},value=Value};
+ _V ->
+ V#valuedef{checked=true,value=_V}
+ end
+ end.
+
+is_contextswitchtype(#typedef{name='EXTERNAL'})->
+ true;
+is_contextswitchtype(#typedef{name='EMBEDDED PDV'}) ->
+ true;
+is_contextswitchtype(#typedef{name='CHARACTER STRING'}) ->
+ true;
+is_contextswitchtype(_) ->
+ false.
+
+% validate_integer(S,{identifier,Pos,Id},NamedNumberList,Constr) ->
+% case lists:keysearch(Id,1,NamedNumberList) of
+% {value,_} -> ok;
+% false -> error({value,"unknown NamedNumber",S})
+% end;
+%% This case occurs when there is a valuereference
+validate_integer(S=#state{mname=M},
+ #'Externalvaluereference'{module=M,value=Id},
+ NamedNumberList,_Constr) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown NamedNumber",S})
+ end;
+validate_integer(S,Id,NamedNumberList,_Constr) when atom(Id) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown NamedNumber",S})
+ end;
+validate_integer(_S,Value,_NamedNumberList,Constr) when integer(Value) ->
+ check_integer_range(Value,Constr).
+
+check_integer_range(Int,Constr) when list(Constr) ->
+ NewConstr = [X || #constraint{c=X} <- Constr],
+ check_constr(Int,NewConstr);
+
+check_integer_range(_Int,_Constr) ->
+ %%io:format("~p~n",[Constr]),
+ ok.
+
+check_constr(Int,[{'ValueRange',Lb,Ub}|T]) when Int >= Lb, Int =< Ub ->
+ check_constr(Int,T);
+check_constr(_Int,[]) ->
+ ok.
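+%% For example (a minimal sketch, using the #constraint record as above):
+%%   check_integer_range(5,[#constraint{c={'ValueRange',1,10}}]) -> ok
+%%   check_constr(5,[{'ValueRange',1,10}])                       -> ok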
+
+validate_bitstring(_S,_Value,_NamedNumberList,_Constr) ->
+ ok.
+
+validate_null(_S,'NULL',_Constr) ->
+ ok.
+
+%%------------
+%% This can be removed when the old parser is removed
+%% The function removes 'space' atoms from the list
+
+is_space_list([H],Acc) ->
+ lists:reverse([H|Acc]);
+is_space_list([H,space|T],Acc) ->
+ is_space_list(T,[H|Acc]);
+is_space_list([],Acc) ->
+ lists:reverse(Acc);
+is_space_list([H|T],Acc) ->
+ is_space_list(T,[H|Acc]).
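+%% For example: is_space_list([a,space,b],[]) -> [a,b].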
+
+validate_objectidentifier(S,L,_) ->
+ case is_space_list(L,[]) of
+ NewL when list(NewL) ->
+ case validate_objectidentifier1(S,NewL) of
+ NewL2 when list(NewL2) ->
+ list_to_tuple(NewL2);
+ Other -> Other
+ end;
+ {error,_} ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end.
+
+validate_objectidentifier1(S, [Id|T]) when record(Id,'Externalvaluereference') ->
+ case catch get_referenced_type(S,Id) of
+ {_,V} when record(V,valuedef) ->
+ case check_value(S,V) of
+ #valuedef{type=#type{def='OBJECT IDENTIFIER'},
+ checked=true,value=Value} when tuple(Value) ->
+ validate_objectid(S, T, lists:reverse(tuple_to_list(Value)));
+ _ ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end;
+ _ ->
+ validate_objectid(S, [Id|T], [])
+ end;
+validate_objectidentifier1(S,V) ->
+ validate_objectid(S,V,[]).
+
+validate_objectid(_, [], Acc) ->
+ lists:reverse(Acc);
+validate_objectid(S, [Value|Vrest], Acc) when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+validate_objectid(S, [{'NamedNumber',_Name,Value}|Vrest], Acc)
+ when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+validate_objectid(S, [Id|Vrest], Acc)
+ when record(Id,'Externalvaluereference') ->
+ case catch get_referenced_type(S, Id) of
+ {_,V} when record(V,valuedef) ->
+ case check_value(S, V) of
+ #valuedef{checked=true,value=Value} when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+ _ ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end;
+ _ ->
+ case reserved_objectid(Id#'Externalvaluereference'.value, Acc) of
+ Value when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+ false ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end
+ end;
+validate_objectid(S, [{Atom,Value}],[]) when atom(Atom),integer(Value) ->
+    %% this case occurs when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value
+ Rec = #'Externalvaluereference'{module=S#state.mname,
+ value=Atom},
+ validate_objectidentifier1(S,[Rec,Value]);
+validate_objectid(S, [{Atom,EVRef}],[])
+ when atom(Atom),record(EVRef,'Externalvaluereference') ->
+    %% this case occurs when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value OTP-4354
+ Rec = #'Externalvaluereference'{module=S#state.mname,
+ value=Atom},
+ validate_objectidentifier1(S,[Rec,EVRef]);
+validate_objectid(S, _V, _Acc) ->
+ error({value, "illegal OBJECT IDENTIFIER",S}).
+
+
+%% ITU-T Rec. X.680 Annex B - D
+reserved_objectid('itu-t',[]) -> 0;
+reserved_objectid('ccitt',[]) -> 0;
+%% arcs below "itu-t"
+reserved_objectid('recommendation',[0]) -> 0;
+reserved_objectid('question',[0]) -> 1;
+reserved_objectid('administration',[0]) -> 2;
+reserved_objectid('network-operator',[0]) -> 3;
+reserved_objectid('identified-organization',[0]) -> 4;
+%% arcs below "recommendation"
+reserved_objectid('a',[0,0]) -> 1;
+reserved_objectid('b',[0,0]) -> 2;
+reserved_objectid('c',[0,0]) -> 3;
+reserved_objectid('d',[0,0]) -> 4;
+reserved_objectid('e',[0,0]) -> 5;
+reserved_objectid('f',[0,0]) -> 6;
+reserved_objectid('g',[0,0]) -> 7;
+reserved_objectid('h',[0,0]) -> 8;
+reserved_objectid('i',[0,0]) -> 9;
+reserved_objectid('j',[0,0]) -> 10;
+reserved_objectid('k',[0,0]) -> 11;
+reserved_objectid('l',[0,0]) -> 12;
+reserved_objectid('m',[0,0]) -> 13;
+reserved_objectid('n',[0,0]) -> 14;
+reserved_objectid('o',[0,0]) -> 15;
+reserved_objectid('p',[0,0]) -> 16;
+reserved_objectid('q',[0,0]) -> 17;
+reserved_objectid('r',[0,0]) -> 18;
+reserved_objectid('s',[0,0]) -> 19;
+reserved_objectid('t',[0,0]) -> 20;
+reserved_objectid('u',[0,0]) -> 21;
+reserved_objectid('v',[0,0]) -> 22;
+reserved_objectid('w',[0,0]) -> 23;
+reserved_objectid('x',[0,0]) -> 24;
+reserved_objectid('y',[0,0]) -> 25;
+reserved_objectid('z',[0,0]) -> 26;
+
+
+reserved_objectid(iso,[]) -> 1;
+%% arcs below "iso", note that number 1 is not used
+reserved_objectid('standard',[1]) -> 0;
+reserved_objectid('member-body',[1]) -> 2;
+reserved_objectid('identified-organization',[1]) -> 3;
+
+reserved_objectid('joint-iso-itu-t',[]) -> 2;
+reserved_objectid('joint-iso-ccitt',[]) -> 2;
+
+reserved_objectid(_,_) -> false.
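+%% For example (a minimal sketch of the intended use): when the identifiers
+%% are not defined values in the module, those in the value
+%% {itu-t recommendation x 680} resolve through this table to 0, 0 and 24,
+%% so validate_objectid/3 above accumulates [0,0,24,680] and the checked
+%% value becomes {0,0,24,680}.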
+
+
+
+
+
+validate_objectdescriptor(_S,_Value,_Constr) ->
+ ok.
+
+validate_enumerated(S,Id,NamedNumberList,_Constr) when atom(Id) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown ENUMERATED",S})
+ end;
+validate_enumerated(S,{identifier,_Pos,Id},NamedNumberList,_Constr) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown ENUMERATED",S})
+ end;
+validate_enumerated(S,#'Externalvaluereference'{value=Id},
+ NamedNumberList,_Constr) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown ENUMERATED",S})
+ end.
+
+validate_boolean(_S,_Value,_Constr) ->
+ ok.
+
+validate_octetstring(_S,_Value,_Constr) ->
+ ok.
+
+validate_restrictedstring(_S,_Value,_Def,_Constr) ->
+ ok.
+
+validate_sequence(S=#state{type=Vtype},Value,_Components,_Constr) ->
+ case Vtype of
+ #type{tag=[{tag,'UNIVERSAL',8,'IMPLICIT',32}]} ->
+ %% this is an 'EXTERNAL' (or INSTANCE OF)
+ case Value of
+ [{identification,_}|_RestVal] ->
+ to_EXTERNAL1990(S,Value);
+ _ ->
+ Value
+ end;
+ _ ->
+ Value
+ end.
+
+validate_sequenceof(_S,_Value,_Components,_Constr) ->
+ ok.
+
+validate_choice(_S,_Value,_Components,_Constr) ->
+ ok.
+
+validate_set(_S,_Value,_Components,_Constr) ->
+ ok.
+
+validate_setof(_S,_Value,_Components,_Constr) ->
+ ok.
+
+to_EXTERNAL1990(S,[{identification,{'CHOICE',{syntax,Stx}}}|Rest]) ->
+ to_EXTERNAL1990(S,Rest,[{'direct-reference',Stx}]);
+to_EXTERNAL1990(S,[{identification,{'CHOICE',{'presentation-context-id',I}}}|Rest]) ->
+ to_EXTERNAL1990(S,Rest,[{'indirect-reference',I}]);
+to_EXTERNAL1990(S,[{identification,{'CHOICE',{'context-negotiation',[{_,PCid},{_,TrStx}]}}}|Rest]) ->
+ to_EXTERNAL1990(S,Rest,[{'indirect-reference',PCid},{'direct-reference',TrStx}]);
+to_EXTERNAL1990(S,_) ->
+ error({value,"illegal value in EXTERNAL type",S}).
+
+to_EXTERNAL1990(S,[V={'data-value-descriptor',_}|Rest],Acc) ->
+ to_EXTERNAL1990(S,Rest,[V|Acc]);
+to_EXTERNAL1990(_S,[{'data-value',Val}],Acc) ->
+ Encoding = {encoding,{'CHOICE',{'octet-aligned',Val}}},
+ lists:reverse([Encoding|Acc]);
+to_EXTERNAL1990(S,_,_) ->
+ error({value,"illegal value in EXTERNAL type",S}).
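+%% For example (a minimal sketch; OID and Octets are hypothetical values):
+%%   to_EXTERNAL1990(S,[{identification,{'CHOICE',{syntax,OID}}},
+%%                      {'data-value',Octets}])
+%% returns [{'direct-reference',OID},
+%%          {encoding,{'CHOICE',{'octet-aligned',Octets}}}].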
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Functions to normalize the default values of SEQUENCE
+%% and SET components into valid Erlang format
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+normalize_value(_,_,mandatory,_) ->
+ mandatory;
+normalize_value(_,_,'OPTIONAL',_) ->
+ 'OPTIONAL';
+normalize_value(S,Type,{'DEFAULT',Value},NameList) ->
+ case catch get_canonic_type(S,Type,NameList) of
+ {'BOOLEAN',CType,_} ->
+ normalize_boolean(S,Value,CType);
+ {'INTEGER',CType,_} ->
+ normalize_integer(S,Value,CType);
+ {'BIT STRING',CType,_} ->
+ normalize_bitstring(S,Value,CType);
+ {'OCTET STRING',CType,_} ->
+ normalize_octetstring(S,Value,CType);
+ {'NULL',_CType,_} ->
+ %%normalize_null(Value);
+ 'NULL';
+ {'OBJECT IDENTIFIER',_,_} ->
+ normalize_objectidentifier(S,Value);
+ {'ObjectDescriptor',_,_} ->
+ normalize_objectdescriptor(Value);
+ {'REAL',_,_} ->
+ normalize_real(Value);
+ {'ENUMERATED',CType,_} ->
+ normalize_enumerated(Value,CType);
+ {'CHOICE',CType,NewNameList} ->
+ normalize_choice(S,Value,CType,NewNameList);
+ {'SEQUENCE',CType,NewNameList} ->
+ normalize_sequence(S,Value,CType,NewNameList);
+ {'SEQUENCE OF',CType,NewNameList} ->
+ normalize_seqof(S,Value,CType,NewNameList);
+ {'SET',CType,NewNameList} ->
+ normalize_set(S,Value,CType,NewNameList);
+ {'SET OF',CType,NewNameList} ->
+ normalize_setof(S,Value,CType,NewNameList);
+ {restrictedstring,CType,_} ->
+ normalize_restrictedstring(S,Value,CType);
+ _ ->
+ io:format("WARNING: could not check default value ~p~n",[Value]),
+ Value
+ end;
+normalize_value(S,Type,Val,NameList) ->
+ normalize_value(S,Type,{'DEFAULT',Val},NameList).
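+%% For example, for a component whose canonic type is 'INTEGER', a default
+%% such as {'DEFAULT',12} normalizes to 12 via normalize_integer/3 (a minimal
+%% sketch, assuming get_canonic_type/3 classifies the type as 'INTEGER').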
+
+normalize_boolean(S,{Name,Bool},CType) when atom(Name) ->
+ normalize_boolean(S,Bool,CType);
+normalize_boolean(_,true,_) ->
+ true;
+normalize_boolean(_,false,_) ->
+ false;
+normalize_boolean(S,Bool=#'Externalvaluereference'{},CType) ->
+ get_normalized_value(S,Bool,CType,fun normalize_boolean/3,[]);
+normalize_boolean(_,Other,_) ->
+ throw({error,{asn1,{'invalid default value',Other}}}).
+
+normalize_integer(_S,Int,_) when integer(Int) ->
+ Int;
+normalize_integer(_S,{Name,Int},_) when atom(Name),integer(Int) ->
+ Int;
+normalize_integer(S,{Name,Int=#'Externalvaluereference'{}},
+ Type) when atom(Name) ->
+ normalize_integer(S,Int,Type);
+normalize_integer(S,Int=#'Externalvaluereference'{value=Name},Type) ->
+ case Type of
+ NNL when list(NNL) ->
+ case lists:keysearch(Name,1,NNL) of
+ {value,{Name,Val}} ->
+ Val;
+ false ->
+ get_normalized_value(S,Int,Type,
+ fun normalize_integer/3,[])
+ end;
+ _ ->
+ get_normalized_value(S,Int,Type,fun normalize_integer/3,[])
+ end;
+normalize_integer(_,Int,_) ->
+ exit({'Unknown INTEGER value',Int}).
+
+normalize_bitstring(S,Value,Type)->
+ %% There are four different Erlang formats of BIT STRING:
+ %% 1 - a list of ones and zeros.
+ %% 2 - a list of atoms.
+ %% 3 - as an integer, for instance in hexadecimal form.
+ %% 4 - as a tuple {Unused, Binary} where Unused is an integer
+ %% and tells how many bits of Binary are unused.
+ %%
+ %% normalize_bitstring/3 transforms Value according to:
+ %% A to 3,
+ %% B to 1,
+ %% C to 1 or 3
+ %% D to 2,
+    %% Value can be in one of the formats:
+    %% A - {hstring, String}, where String is a hexadecimal string.
+    %% B - {bstring, String}, where String is a string in bit format.
+    %% C - #'Externalvaluereference'{value=V}, where V is a defined value.
+    %% D - a list of #'Externalvaluereference', where each value component
+    %% is an identifier corresponding to NamedBits in Type.
+ case Value of
+ {hstring,String} when list(String) ->
+ hstring_to_int(String);
+ {bstring,String} when list(String) ->
+ bstring_to_bitlist(String);
+ Rec when record(Rec,'Externalvaluereference') ->
+ get_normalized_value(S,Value,Type,
+ fun normalize_bitstring/3,[]);
+ RecList when list(RecList) ->
+ case Type of
+ NBL when list(NBL) ->
+ F = fun(#'Externalvaluereference'{value=Name}) ->
+ case lists:keysearch(Name,1,NBL) of
+ {value,{Name,_}} ->
+ Name;
+ Other ->
+ throw({error,Other})
+ end;
+ (Other) ->
+ throw({error,Other})
+ end,
+ case catch lists:map(F,RecList) of
+ {error,Reason} ->
+ io:format("WARNING: default value not "
+ "compatible with type definition ~p~n",
+ [Reason]),
+ Value;
+ NewList ->
+ NewList
+ end;
+ _ ->
+ io:format("WARNING: default value not "
+ "compatible with type definition ~p~n",
+ [RecList]),
+ Value
+ end;
+ {Name,String} when atom(Name) ->
+ normalize_bitstring(S,String,Type);
+ Other ->
+ io:format("WARNING: illegal default value ~p~n",[Other]),
+ Value
+ end.
+
+hstring_to_int(L) when list(L) ->
+ hstring_to_int(L,0).
+hstring_to_int([H|T],Acc) when H >= $A, H =< $F ->
+ hstring_to_int(T,(Acc bsl 4) + (H - $A + 10) ) ;
+hstring_to_int([H|T],Acc) when H >= $0, H =< $9 ->
+ hstring_to_int(T,(Acc bsl 4) + (H - $0));
+hstring_to_int([],Acc) ->
+ Acc.
+
+bstring_to_bitlist([H|T]) when H == $0; H == $1 ->
+ [H - $0 | bstring_to_bitlist(T)];
+bstring_to_bitlist([]) ->
+ [].
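+%% For example: hstring_to_int("1F") -> 31 and
+%% bstring_to_bitlist("0101") -> [0,1,0,1].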
+
+%% normalize_octetstring/3 changes the representation of the input Value to a
+%% list of octets.
+%% Format of Value is one of:
+%% {bstring,String} each element in String corresponds to one bit of an octet
+%% {hstring,String} each pair of elements in String corresponds to one octet
+%% #'Externalvaluereference'
+normalize_octetstring(S,Value,CType) ->
+ case Value of
+ {bstring,String} ->
+ bstring_to_octetlist(String);
+ {hstring,String} ->
+ hstring_to_octetlist(String);
+ Rec when record(Rec,'Externalvaluereference') ->
+ get_normalized_value(S,Value,CType,
+ fun normalize_octetstring/3,[]);
+ {Name,String} when atom(Name) ->
+ normalize_octetstring(S,String,CType);
+ List when list(List) ->
+ %% check if list elements are valid octet values
+ lists:map(fun([])-> ok;
+ (H)when H > 255->
+ io:format("WARNING: not legal octet value ~p in OCTET STRING, ~p~n",[H,List]);
+ (_)-> ok
+ end, List),
+ List;
+ Other ->
+ io:format("WARNING: unknown default value ~p~n",[Other]),
+ Value
+ end.
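+
+%% Illustrative sketch of the octet conversions done by the helpers below:
+%%   normalize_octetstring(S, {bstring,"10100000"}, CType) -> [160]
+%%   normalize_octetstring(S, {hstring,"A0FF"}, CType)     -> [160,255]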
+
+
+bstring_to_octetlist([]) ->
+ [];
+bstring_to_octetlist([H|T]) when H == $0 ; H == $1 ->
+ bstring_to_octetlist(T,6,[(H - $0) bsl 7]).
+bstring_to_octetlist([H|T],0,[Hacc|Tacc]) when H == $0; H == $1 ->
+ bstring_to_octetlist(T, 7, [0,Hacc + (H -$0)| Tacc]);
+bstring_to_octetlist([H|T],BSL,[Hacc|Tacc]) when H == $0; H == $1 ->
+ bstring_to_octetlist(T, BSL-1, [Hacc + ((H - $0) bsl BSL)| Tacc]);
+bstring_to_octetlist([],7,[0|Acc]) ->
+ lists:reverse(Acc);
+bstring_to_octetlist([],_,Acc) ->
+ lists:reverse(Acc).
+
+hstring_to_octetlist([]) ->
+ [];
+hstring_to_octetlist(L) ->
+ hstring_to_octetlist(L,4,[]).
+hstring_to_octetlist([H|T],0,[Hacc|Tacc]) when H >= $A, H =< $F ->
+ hstring_to_octetlist(T,4,[Hacc + (H - $A + 10)|Tacc]);
+hstring_to_octetlist([H|T],BSL,Acc) when H >= $A, H =< $F ->
+ hstring_to_octetlist(T,0,[(H - $A + 10) bsl BSL|Acc]);
+hstring_to_octetlist([H|T],0,[Hacc|Tacc]) when H >= $0; H =< $9 ->
+ hstring_to_octetlist(T,4,[Hacc + (H - $0)|Tacc]);
+hstring_to_octetlist([H|T],BSL,Acc) when H >= $0; H =< $9 ->
+ hstring_to_octetlist(T,0,[(H - $0) bsl BSL|Acc]);
+hstring_to_octetlist([],_,Acc) ->
+ lists:reverse(Acc).
+
+normalize_objectidentifier(S,Value) ->
+ validate_objectidentifier(S,Value,[]).
+
+normalize_objectdescriptor(Value) ->
+ Value.
+
+normalize_real(Value) ->
+ Value.
+
+normalize_enumerated(#'Externalvaluereference'{value=V},CType)
+ when list(CType) ->
+ normalize_enumerated2(V,CType);
+normalize_enumerated(Value,CType) when atom(Value),list(CType) ->
+ normalize_enumerated2(Value,CType);
+normalize_enumerated({Name,EnumV},CType) when atom(Name) ->
+ normalize_enumerated(EnumV,CType);
+normalize_enumerated(Value,{CType1,CType2}) when list(CType1), list(CType2)->
+ normalize_enumerated(Value,CType1++CType2);
+normalize_enumerated(V,CType) ->
+ io:format("WARNING: Enumerated unknown type ~p~n",[CType]),
+ V.
+normalize_enumerated2(V,Enum) ->
+ case lists:keysearch(V,1,Enum) of
+ {value,{Val,_}} -> Val;
+ _ ->
+ io:format("WARNING: Enumerated value is not correct ~p~n",[V]),
+ V
+ end.
+
+normalize_choice(S,{'CHOICE',{C,V}},CType,NameList) when atom(C) ->
+ Value =
+ case V of
+ Rec when record(Rec,'Externalvaluereference') ->
+ get_normalized_value(S,V,CType,
+ fun normalize_choice/4,
+ [NameList]);
+ _ -> V
+ end,
+ case catch lists:keysearch(C,#'ComponentType'.name,CType) of
+ {value,#'ComponentType'{typespec=CT,name=Name}} ->
+ {C,normalize_value(S,CT,{'DEFAULT',Value},
+ [Name|NameList])};
+ Other ->
+ io:format("WARNING: Wrong format of type/value ~p/~p~n",
+ [Other,Value]),
+ {C,Value}
+ end;
+normalize_choice(S,{'DEFAULT',ValueList},CType,NameList) ->
+ lists:map(fun(X)-> normalize_choice(S,X,CType,NameList) end, ValueList);
+normalize_choice(S,Val=#'Externalvaluereference'{},CType,NameList) ->
+ {_,#valuedef{value=V}}=get_referenced_type(S,Val),
+ normalize_choice(S,{'CHOICE',V},CType,NameList);
+% get_normalized_value(S,Val,CType,fun normalize_choice/4,[NameList]);
+normalize_choice(S,{Name,ChoiceVal},CType,NameList)
+ when atom(Name) ->
+ normalize_choice(S,ChoiceVal,CType,NameList).
+
+normalize_sequence(S,{Name,Value},Components,NameList)
+ when atom(Name),list(Value) ->
+ normalize_sequence(S,Value,Components,NameList);
+normalize_sequence(S,Value,Components,NameList) ->
+ normalized_record('SEQUENCE',S,Value,Components,NameList).
+
+normalize_set(S,{Name,Value},Components,NameList)
+ when atom(Name),list(Value) ->
+ normalized_record('SET',S,Value,Components,NameList);
+normalize_set(S,Value,Components,NameList) ->
+ normalized_record('SET',S,Value,Components,NameList).
+
+normalized_record(SorS,S,Value,Components,NameList) ->
+ NewName = list_to_atom(asn1ct_gen:list2name(NameList)),
+ NoComps = length(Components),
+ case normalize_seq_or_set(SorS,S,Value,Components,NameList,[]) of
+ ListOfVals when length(ListOfVals) == NoComps ->
+ list_to_tuple([NewName|ListOfVals]);
+ _ ->
+ error({type,{illegal,default,value,Value},S})
+ end.
+
+normalize_seq_or_set(SorS,S,[{Cname,V}|Vs],
+ [#'ComponentType'{name=Cname,typespec=TS}|Cs],
+ NameList,Acc) ->
+ NewNameList =
+ case TS#type.def of
+ #'Externaltypereference'{type=TName} ->
+ [TName];
+ _ -> [Cname|NameList]
+ end,
+ NVal = normalize_value(S,TS,{'DEFAULT',V},NewNameList),
+ normalize_seq_or_set(SorS,S,Vs,Cs,NameList,[NVal|Acc]);
+normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+ [#'ComponentType'{prop='OPTIONAL'}|Cs],
+ NameList,Acc) ->
+ normalize_seq_or_set(SorS,S,Values,Cs,NameList,[asn1_NOVALUE|Acc]);
+normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+ [#'ComponentType'{name=Cname2,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList,Acc) ->
+ NewNameList =
+ case TS#type.def of
+ #'Externaltypereference'{type=TName} ->
+ [TName];
+ _ -> [Cname2|NameList]
+ end,
+ NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
+ normalize_seq_or_set(SorS,S,Values,Cs,NameList,[NVal|Acc]);
+normalize_seq_or_set(_SorS,_S,[],[],_,Acc) ->
+ lists:reverse(Acc);
+%% If the default value is {}, the ComponentTypes in the SEQUENCE are marked
+%% DEFAULT or OPTIONAL (or the type is defined as SEQUENCE {}, which is
+%% handled by the previous case).
+normalize_seq_or_set(SorS,S,[],
+ [#'ComponentType'{name=Name,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList,Acc) ->
+ NewNameList =
+ case TS#type.def of
+ #'Externaltypereference'{type=TName} ->
+ [TName];
+ _ -> [Name|NameList]
+ end,
+ NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
+ normalize_seq_or_set(SorS,S,[],Cs,NameList,[NVal|Acc]);
+normalize_seq_or_set(SorS,S,[],[#'ComponentType'{prop='OPTIONAL'}|Cs],
+ NameList,Acc) ->
+ normalize_seq_or_set(SorS,S,[],Cs,NameList,[asn1_NOVALUE|Acc]);
+normalize_seq_or_set(SorS,S,Value=#'Externalvaluereference'{},
+ Cs,NameList,Acc) ->
+ get_normalized_value(S,Value,Cs,fun normalize_seq_or_set/6,
+ [SorS,NameList,Acc]);
+normalize_seq_or_set(_SorS,S,V,_,_,_) ->
+ error({type,{illegal,default,value,V},S}).
+
+normalize_seqof(S,Value,Type,NameList) ->
+ normalize_s_of('SEQUENCE OF',S,Value,Type,NameList).
+
+normalize_setof(S,Value,Type,NameList) ->
+ normalize_s_of('SET OF',S,Value,Type,NameList).
+
+normalize_s_of(SorS,S,Value,Type,NameList) when list(Value) ->
+ DefValueList = lists:map(fun(X) -> {'DEFAULT',X} end,Value),
+ Suffix = asn1ct_gen:constructed_suffix(SorS,Type),
+ Def = Type#type.def,
+ InnerType = asn1ct_gen:get_inner(Def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ NewNameList =
+ case WhatKind of
+ {constructed,bif} ->
+ [Suffix|NameList];
+ #'Externaltypereference'{type=Name} ->
+ [Name];
+ _ -> []
+ end,
+ NormFun = fun (X) -> normalize_value(S,Type,X,
+ NewNameList) end,
+ case catch lists:map(NormFun, DefValueList) of
+ List when list(List) ->
+ List;
+ _ ->
+ io:format("WARNING: ~p could not handle value ~p~n",
+ [SorS,Value]),
+ Value
+ end;
+normalize_s_of(SorS,S,Value,Type,NameList)
+ when record(Value,'Externalvaluereference') ->
+ get_normalized_value(S,Value,Type,fun normalize_s_of/5,
+ [SorS,NameList]).
+% case catch get_referenced_type(S,Value) of
+% {_,#valuedef{value=V}} ->
+% normalize_s_of(SorS,S,V,Type);
+% {error,Reason} ->
+% io:format("WARNING: ~p could not handle value ~p~n",
+% [SorS,Value]),
+% Value;
+% {_,NewVal} ->
+% normalize_s_of(SorS,S,NewVal,Type);
+% _ ->
+% io:format("WARNING: ~p could not handle value ~p~n",
+% [SorS,Value]),
+% Value
+% end.
+
+
+%% normalize_restrictedstring handles all formats of restricted strings.
+%% tuple case
+normalize_restrictedstring(_S,[Int1,Int2],_) when integer(Int1),integer(Int2) ->
+ {Int1,Int2};
+%% quadruple case
+normalize_restrictedstring(_S,[Int1,Int2,Int3,Int4],_) when integer(Int1),
+ integer(Int2),
+ integer(Int3),
+ integer(Int4) ->
+ {Int1,Int2,Int3,Int4};
+%% character string list case
+normalize_restrictedstring(S,[H|T],CType) when list(H);tuple(H) ->
+ [normalize_restrictedstring(S,H,CType)|normalize_restrictedstring(S,T,CType)];
+%% character string case
+normalize_restrictedstring(_S,CString,_) when list(CString) ->
+    Fun =
+	fun(X) ->
+		if
+		   X =< 255, X >= 0 ->
+			ok;
+		   true ->
+			io:format("WARNING: illegal character in string"
+				  " ~p~n",[X])
+		end
+	end,
+ lists:foreach(Fun,CString),
+ CString;
+%% definedvalue case or argument in a parameterized type
+normalize_restrictedstring(S,ERef,CType) when record(ERef,'Externalvaluereference') ->
+ get_normalized_value(S,ERef,CType,
+ fun normalize_restrictedstring/3,[]);
+%%
+normalize_restrictedstring(S,{Name,Val},CType) when atom(Name) ->
+ normalize_restrictedstring(S,Val,CType).
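+
+%% Illustrative sketch: a two- or four-element list of integers is taken to
+%% be the tuple or quadruple form, while longer character strings pass
+%% through unchanged after the per-character check, e.g.
+%%   normalize_restrictedstring(S, [0,255], CType) -> {0,255}
+%%   normalize_restrictedstring(S, "hello", CType) -> "hello"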
+
+
+get_normalized_value(S,Val,Type,Func,AddArg) ->
+ case catch get_referenced_type(S,Val) of
+ {_,#valuedef{type=_T,value=V}} ->
+	    %% should check that Type and T are equal
+ call_Func(S,V,Type,Func,AddArg);
+ {error,_} ->
+ io:format("WARNING: default value not "
+ "comparable ~p~n",[Val]),
+ Val;
+ {_,NewVal} ->
+ call_Func(S,NewVal,Type,Func,AddArg);
+ _ ->
+ io:format("WARNING: default value not "
+ "comparable ~p~n",[Val]),
+ Val
+ end.
+
+call_Func(S,Val,Type,Func,ArgList) ->
+ case ArgList of
+ [] ->
+ Func(S,Val,Type);
+ [LastArg] ->
+ Func(S,Val,Type,LastArg);
+ [Arg1,LastArg1] ->
+ Func(Arg1,S,Val,Type,LastArg1);
+ [Arg1,LastArg1,LastArg2] ->
+ Func(Arg1,S,Val,Type,LastArg1,LastArg2)
+ end.
+
+
+get_canonic_type(S,Type,NameList) ->
+ {InnerType,NewType,NewNameList} =
+ case Type#type.def of
+ Name when atom(Name) ->
+ {Name,Type,NameList};
+ Ref when record(Ref,'Externaltypereference') ->
+ {_,#typedef{name=Name,typespec=RefedType}} =
+ get_referenced_type(S,Ref),
+ get_canonic_type(S,RefedType,[Name]);
+ {Name,T} when atom(Name) ->
+ {Name,T,NameList};
+ Seq when record(Seq,'SEQUENCE') ->
+ {'SEQUENCE',Seq#'SEQUENCE'.components,NameList};
+ Set when record(Set,'SET') ->
+ {'SET',Set#'SET'.components,NameList}
+ end,
+ {asn1ct_gen:unify_if_string(InnerType),NewType,NewNameList}.
+
+
+
+check_ptype(_S,Type,Ts) when record(Ts,type) ->
+ %Tag = Ts#type.tag,
+ %Constr = Ts#type.constraint,
+ Def = Ts#type.def,
+ NewDef=
+ case Def of
+ Seq when record(Seq,'SEQUENCE') ->
+ #newt{type=Seq#'SEQUENCE'{pname=Type#ptypedef.name}};
+ Set when record(Set,'SET') ->
+ #newt{type=Set#'SET'{pname=Type#ptypedef.name}};
+ _Other ->
+ #newt{}
+ end,
+ Ts2 = case NewDef of
+ #newt{type=unchanged} ->
+ Ts;
+ #newt{type=TDef}->
+ Ts#type{def=TDef}
+ end,
+ Ts2.
+
+
+% check_type(S,Type,ObjSpec={{objectclassname,_},_}) ->
+% check_class(S,ObjSpec);
+check_type(_S,Type,Ts) when record(Type,typedef),
+ (Type#typedef.checked==true) ->
+ Ts;
+check_type(_S,Type,Ts) when record(Type,typedef),
+ (Type#typedef.checked==idle) -> % the check is going on
+ Ts;
+check_type(S=#state{recordtopname=TopName},Type,Ts) when record(Ts,type) ->
+ {Def,Tag,Constr} =
+ case match_parameters(Ts#type.def,S#state.parameters) of
+ #type{constraint=_Ctmp,def=Dtmp} ->
+ {Dtmp,Ts#type.tag,Ts#type.constraint};
+ Dtmp ->
+ {Dtmp,Ts#type.tag,Ts#type.constraint}
+ end,
+ TempNewDef = #newt{type=Def,tag=Tag,constraint=Constr},
+ TestFun =
+ fun(Tref) ->
+ {_,MaybeChoice} = get_referenced_type(S,Tref),
+ case catch((MaybeChoice#typedef.typespec)#type.def) of
+ {'CHOICE',_} ->
+ maybe_illicit_implicit_tag(choice,Tag);
+ 'ANY' ->
+ maybe_illicit_implicit_tag(open_type,Tag);
+ 'ANY DEFINED BY' ->
+ maybe_illicit_implicit_tag(open_type,Tag);
+ 'ASN1_OPEN_TYPE' ->
+ maybe_illicit_implicit_tag(open_type,Tag);
+ _ ->
+ Tag
+ end
+ end,
+ NewDef=
+ case Def of
+ Ext when record(Ext,'Externaltypereference') ->
+ {_,RefTypeDef} = get_referenced_type(S,Ext),
+% case RefTypeDef of
+% Class when record(Class,classdef) ->
+% throw({asn1_class,Class});
+% _ -> ok
+% end,
+ case is_class(S,RefTypeDef) of
+ true -> throw({asn1_class,RefTypeDef});
+ _ -> ok
+ end,
+ Ct = TestFun(Ext),
+ RefType =
+%case S#state.erule of
+% ber_bin_v2 ->
+ case RefTypeDef#typedef.checked of
+ true ->
+ RefTypeDef#typedef.typespec;
+ _ ->
+ NewRefTypeDef1 = RefTypeDef#typedef{checked=idle},
+ asn1_db:dbput(S#state.mname,
+ NewRefTypeDef1#typedef.name,NewRefTypeDef1),
+ RefType1 =
+ check_type(S,RefTypeDef,RefTypeDef#typedef.typespec),
+ NewRefTypeDef2 =
+ RefTypeDef#typedef{checked=true,typespec = RefType1},
+ asn1_db:dbput(S#state.mname,
+ NewRefTypeDef2#typedef.name,NewRefTypeDef2),
+ %% update the type and mark as checked
+ RefType1
+ end,
+% _ -> RefTypeDef#typedef.typespec
+% end,
+
+ case asn1ct_gen:prim_bif(asn1ct_gen:get_inner(RefType#type.def)) of
+ true ->
+ %% Here we expand to a built in type and inline it
+ TempNewDef#newt{
+ type=
+ RefType#type.def,
+ tag=
+ merge_tags(Ct,RefType#type.tag),
+ constraint=
+ merge_constraints(check_constraints(S,Constr),
+ RefType#type.constraint)};
+ _ ->
+ %% Here we only expand the tags and keep the ext ref
+
+ TempNewDef#newt{
+ type=
+ check_externaltypereference(S,Ext),
+ tag =
+ case S#state.erule of
+ ber_bin_v2 ->
+ merge_tags(Ct,RefType#type.tag);
+ _ ->
+ Ct
+ end
+ }
+ end;
+ 'ANY' ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+ {'ANY_DEFINED_BY',_} ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+ 'INTEGER' ->
+ check_integer(S,[],Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_INTEGER))};
+
+ {'INTEGER',NamedNumberList} ->
+ TempNewDef#newt{type={'INTEGER',check_integer(S,NamedNumberList,Constr)},
+ tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_INTEGER))};
+ {'BIT STRING',NamedNumberList} ->
+ NewL = check_bitstring(S,NamedNumberList,Constr),
+%% erlang:display({asn1ct_check,NamedNumberList,NewL}),
+ TempNewDef#newt{type={'BIT STRING',NewL},
+ tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_BIT_STRING))};
+ 'NULL' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_NULL))};
+ 'OBJECT IDENTIFIER' ->
+ check_objectidentifier(S,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER))};
+ 'ObjectDescriptor' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_OBJECT_DESCRIPTOR))};
+ 'EXTERNAL' ->
+%% AssociatedType = asn1_db:dbget(S#state.mname,'EXTERNAL'),
+%% #newt{type=check_type(S,Type,AssociatedType)};
+ put(external,unchecked),
+ TempNewDef#newt{type=
+ #'Externaltypereference'{module=S#state.mname,
+ type='EXTERNAL'},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_EXTERNAL))};
+ {'INSTANCE OF',DefinedObjectClass,Constraint} ->
+ %% check that DefinedObjectClass is of TYPE-IDENTIFIER class
+ %% If Constraint is empty make it the general INSTANCE OF type
+ %% If Constraint is not empty make an inlined type
+ %% convert INSTANCE OF to the associated type
+ IOFDef=check_instance_of(S,DefinedObjectClass,Constraint),
+ TempNewDef#newt{type=IOFDef,
+ tag=merge_tags(Tag,?TAG_CONSTRUCTED(?N_INSTANCE_OF))};
+ {'ENUMERATED',NamedNumberList} ->
+ TempNewDef#newt{type=
+ {'ENUMERATED',
+ check_enumerated(S,NamedNumberList,Constr)},
+ tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_ENUMERATED))};
+ 'EMBEDDED PDV' ->
+% AssociatedType = asn1_db:dbget(S#state.mname,'EMBEDDED PDV'),
+% CheckedType = check_type(S,Type,
+% AssociatedType#typedef.typespec),
+ put(embedded_pdv,unchecked),
+ TempNewDef#newt{type=
+ #'Externaltypereference'{module=S#state.mname,
+ type='EMBEDDED PDV'},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_EMBEDDED_PDV))};
+ 'BOOLEAN'->
+ check_boolean(S,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_BOOLEAN))};
+ 'OCTET STRING' ->
+ check_octetstring(S,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_OCTET_STRING))};
+ 'NumericString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_NumericString))};
+ 'TeletexString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_TeletexString))};
+ 'VideotexString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_VideotexString))};
+ 'UTCTime' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_UTCTime))};
+ 'GeneralizedTime' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_GeneralizedTime))};
+ 'GraphicString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_GraphicString))};
+ 'VisibleString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_VisibleString))};
+ 'GeneralString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_GeneralString))};
+ 'PrintableString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_PrintableString))};
+ 'IA5String' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_IA5String))};
+ 'BMPString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_BMPString))};
+ 'UniversalString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_UniversalString))};
+ 'CHARACTER STRING' ->
+% AssociatedType = asn1_db:dbget(S#state.mname,
+% 'CHARACTER STRING'),
+% CheckedType = check_type(S,Type,
+% AssociatedType#typedef.typespec),
+ put(character_string,unchecked),
+ TempNewDef#newt{type=
+ #'Externaltypereference'{module=S#state.mname,
+ type='CHARACTER STRING'},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_CHARACTER_STRING))};
+ Seq when record(Seq,'SEQUENCE') ->
+ RecordName =
+ case TopName of
+ [] ->
+ [Type#typedef.name];
+ _ ->
+ TopName
+ end,
+ {TableCInf,Components} =
+ check_sequence(S#state{recordtopname=
+ RecordName},
+ Type,Seq#'SEQUENCE'.components),
+ TempNewDef#newt{type=Seq#'SEQUENCE'{tablecinf=TableCInf,
+ components=Components},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
+ {'SEQUENCE OF',Components} ->
+ TempNewDef#newt{type={'SEQUENCE OF',check_sequenceof(S,Type,Components)},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
+ {'CHOICE',Components} ->
+ Ct = maybe_illicit_implicit_tag(choice,Tag),
+ TempNewDef#newt{type={'CHOICE',check_choice(S,Type,Components)},tag=Ct};
+ Set when record(Set,'SET') ->
+ RecordName=
+ case TopName of
+ [] ->
+ [Type#typedef.name];
+ _ ->
+ TopName
+ end,
+ {Sorted,TableCInf,Components} =
+ check_set(S#state{recordtopname=RecordName},
+ Type,Set#'SET'.components),
+ TempNewDef#newt{type=Set#'SET'{sorted=Sorted,
+ tablecinf=TableCInf,
+ components=Components},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
+ {'SET OF',Components} ->
+ TempNewDef#newt{type={'SET OF',check_setof(S,Type,Components)},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
+ %% This is a temporary hack until the full Information Obj Spec
+ %% in X.681 is supported
+ {{typereference,_,'TYPE-IDENTIFIER'},[{typefieldreference,_,'Type'}]} ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+
+ {#'Externaltypereference'{type='TYPE-IDENTIFIER'},
+ [{typefieldreference,_,'Type'}]} ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+
+ {pt,Ptype,ParaList} ->
+	    %% Ptype might be a parameterized type, object set or
+	    %% value set. If it isn't a parameterized type, notify the
+	    %% calling function.
+ {_,Ptypedef} = get_referenced_type(S,Ptype),
+ notify_if_not_ptype(S,Ptypedef),
+ NewParaList = [match_parameters(TmpParam,S#state.parameters)||
+ TmpParam <- ParaList],
+ Instance = instantiate_ptype(S,Ptypedef,NewParaList),
+ TempNewDef#newt{type=Instance#type.def,
+ tag=merge_tags(Tag,Instance#type.tag),
+ constraint=Instance#type.constraint,
+ inlined=yes};
+
+% {ClRef,FieldRefList} when record(ClRef,'Externaltypereference') ->
+ OCFT=#'ObjectClassFieldType'{class=ClRef} ->
+	    %% this case occurs in a SEQUENCE when
+	    %% the type of the component is an ObjectClassFieldType
+ ClassSpec = check_class(S,ClRef),
+ NewTypeDef = maybe_open_type(S,ClassSpec,OCFT,Constr),
+ InnerTag = get_innertag(S,NewTypeDef),
+ MergedTag = merge_tags(Tag,InnerTag),
+ Ct =
+ case is_open_type(NewTypeDef) of
+ true ->
+ maybe_illicit_implicit_tag(open_type,MergedTag);
+ _ ->
+ MergedTag
+ end,
+ TempNewDef#newt{type=NewTypeDef,tag=Ct};
+ {valueset,Vtype} ->
+ TempNewDef#newt{type={valueset,check_type(S,Type,Vtype)}};
+ Other ->
+ exit({'cant check' ,Other})
+ end,
+ Ts2 = case NewDef of
+ #newt{type=unchanged} ->
+ Ts#type{def=Def};
+ #newt{type=TDef}->
+ Ts#type{def=TDef}
+ end,
+ NewTag = case NewDef of
+ #newt{tag=unchanged} ->
+ Tag;
+ #newt{tag=TT} ->
+ TT
+ end,
+ T3 = Ts2#type{tag = lists:map(fun(TempTag = #tag{type={default,TTx}}) ->
+ TempTag#tag{type=TTx};
+ (Else) -> Else end, NewTag)},
+ T4 = case NewDef of
+ #newt{constraint=unchanged} ->
+ T3#type{constraint=Constr};
+ #newt{constraint=NewConstr} ->
+ T3#type{constraint=NewConstr}
+ end,
+ T5 = T4#type{inlined=NewDef#newt.inlined},
+ T5#type{constraint=check_constraints(S,T5#type.constraint)}.
+
+
+get_innertag(_S,#'ObjectClassFieldType'{type=Type}) ->
+ case Type of
+ #type{tag=Tag} -> Tag;
+ {fixedtypevaluefield,_,#type{tag=Tag}} -> Tag;
+ {TypeFieldName,_} when atom(TypeFieldName) -> [];
+ _ -> []
+ end;
+get_innertag(_S,_) ->
+ [].
+
+is_class(_S,#classdef{}) ->
+ true;
+is_class(S,#typedef{typespec=#type{def=Eref}})
+ when record(Eref,'Externaltypereference')->
+ {_,NextDef} = get_referenced_type(S,Eref),
+ is_class(S,NextDef);
+is_class(_,_) ->
+ false.
+
+get_class_def(_S,CD=#classdef{}) ->
+ CD;
+get_class_def(S,#typedef{typespec=#type{def=Eref}})
+ when record(Eref,'Externaltypereference') ->
+ {_,NextDef} = get_referenced_type(S,Eref),
+ get_class_def(S,NextDef).
+
+maybe_illicit_implicit_tag(Kind,Tag) ->
+ case Tag of
+ [#tag{type='IMPLICIT'}|_T] ->
+ throw({error,{asn1,{implicit_tag_before,Kind}}});
+ [ChTag = #tag{type={default,_}}|T] ->
+ case Kind of
+ open_type ->
+ [ChTag#tag{type='EXPLICIT',form=32}|T]; %X.680 30.6c, X.690 8.14.2
+ choice ->
+ [ChTag#tag{type='EXPLICIT',form=32}|T] % X.680 28.6 c, 30.6c
+ end;
+ _ ->
+ Tag % unchanged
+ end.
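+
+%% Illustrative sketch: an IMPLICIT tag given directly before a CHOICE or an
+%% open type is rejected, e.g.
+%%   maybe_illicit_implicit_tag(choice, [#tag{type='IMPLICIT'}])
+%% throws {error,{asn1,{implicit_tag_before,choice}}}, while a leading
+%% {default,_} tag is rewritten to an 'EXPLICIT' tag with form=32.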
+
+%% maybe_open_type/4
+%% If the FieldRefList points out a typefield and the class doesn't have
+%% any UNIQUE field, so that a component relation constraint cannot specify
+%% the type of a typefield, the returned 'ObjectClassFieldType' gets the
+%% type 'ASN1_OPEN_TYPE'; otherwise it gets the resolved field type.
+maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
+ OCFT=#'ObjectClassFieldType'{fieldname=FieldRefList},
+ Constr) ->
+ Type = get_ObjectClassFieldType(S,Fs,FieldRefList),
+ FieldNames=get_referenced_fieldname(FieldRefList),
+ case lists:last(FieldRefList) of
+ {valuefieldreference,_} ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type=Type};
+ {typefieldreference,_} ->
+ case {catch get_unique_fieldname(#classdef{typespec=ClassSpec}),
+ asn1ct_gen:get_constraint(Constr,componentrelation)}of
+ {Tuple,_} when tuple(Tuple) ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type='ASN1_OPEN_TYPE'};
+ {_,no} ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type='ASN1_OPEN_TYPE'};
+ _ ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type=Type}
+ end
+ end.
+
+is_open_type(#'ObjectClassFieldType'{type='ASN1_OPEN_TYPE'}) ->
+ true;
+is_open_type(#'ObjectClassFieldType'{}) ->
+ false.
+
+
+notify_if_not_ptype(S,#pvaluesetdef{type=Type}) ->
+ case Type#type.def of
+ Ref when record(Ref,'Externaltypereference') ->
+ case get_referenced_type(S,Ref) of
+ {_,#classdef{}} ->
+ throw(pobjectsetdef);
+ {_,#typedef{}} ->
+ throw(pvalueset)
+ end;
+ T when record(T,type) -> % this must be a value set
+ throw(pvalueset)
+ end;
+notify_if_not_ptype(_S,#ptypedef{}) ->
+ ok.
+
+% fix me
+instantiate_ptype(S,Ptypedef,ParaList) ->
+ #ptypedef{args=Args,typespec=Type} = Ptypedef,
+% Args = get_pt_args(Ptypedef),
+% Type = get_pt_spec(Ptypedef),
+ MatchedArgs = match_args(Args, ParaList, []),
+ NewS = S#state{type=Type,parameters=MatchedArgs,abscomppath=[]},
+    % The abscomppath must be empty since a table constraint in a
+    % parameterized type can only refer to components within the type
+ check_type(NewS, Ptypedef, Type).
+
+get_pt_args(#ptypedef{args=Args}) ->
+ Args;
+get_pt_args(#pvaluesetdef{args=Args}) ->
+ Args;
+get_pt_args(#pvaluedef{args=Args}) ->
+ Args;
+get_pt_args(#pobjectdef{args=Args}) ->
+ Args;
+get_pt_args(#pobjectsetdef{args=Args}) ->
+ Args.
+
+get_pt_spec(#ptypedef{typespec=Type}) ->
+ Type;
+get_pt_spec(#pvaluedef{value=Value}) ->
+ Value;
+get_pt_spec(#pvaluesetdef{valueset=VS}) ->
+ VS;
+get_pt_spec(#pobjectdef{def=Def}) ->
+ Def;
+get_pt_spec(#pobjectsetdef{def=Def}) ->
+ Def.
+
+
+
+match_args([FormArg|Ft], [ActArg|At], Acc) ->
+ match_args(Ft, At, [{FormArg,ActArg}|Acc]);
+match_args([], [], Acc) ->
+ lists:reverse(Acc);
+match_args(_, _, _) ->
+ throw({error,{asn1,{wrong_number_of_arguments}}}).
+
+check_constraints(S,C) when list(C) ->
+ check_constraints(S, C, []);
+check_constraints(S,C) when record(C,constraint) ->
+ check_constraints(S, C#constraint.c, []).
+
+
+resolv_tuple_or_list(S,List) when list(List) ->
+ lists:map(fun(X)->resolv_value(S,X) end, List);
+resolv_tuple_or_list(S,{Lb,Ub}) ->
+ {resolv_value(S,Lb),resolv_value(S,Ub)}.
+
+%%%-----------------------------------------
+%% If the constraint value is a defined value, the value name
+%% is replaced by the actual value.
+%%
+resolv_value(S,Val) ->
+ case match_parameters(Val, S#state.parameters) of
+ Id -> % unchanged
+ resolv_value1(S,Id);
+ Other ->
+ resolv_value(S,Other)
+ end.
+
+resolv_value1(S = #state{mname=M,inputmodules=InpMods},
+ V=#'Externalvaluereference'{pos=Pos,module=ExtM,value=Name}) ->
+ case ExtM of
+ M -> resolv_value2(S,M,Name,Pos);
+ _ ->
+ case lists:member(ExtM,InpMods) of
+ true ->
+ resolv_value2(S,M,Name,Pos);
+ false ->
+ V
+ end
+ end;
+resolv_value1(S,{gt,V}) ->
+ case V of
+ Int when integer(Int) ->
+ V + 1;
+ #valuedef{value=Int} ->
+ 1 + resolv_value(S,Int);
+ Other ->
+ throw({error,{asn1,{undefined_type_or_value,Other}}})
+ end;
+resolv_value1(S,{lt,V}) ->
+ case V of
+ Int when integer(Int) ->
+ V - 1;
+ #valuedef{value=Int} ->
+ resolv_value(S,Int) - 1;
+ Other ->
+ throw({error,{asn1,{undefined_type_or_value,Other}}})
+ end;
+resolv_value1(S,{'ValueFromObject',{object,Object},[{valuefieldreference,
+ FieldName}]}) ->
+ %% FieldName can hold either a fixed-type value or a variable-type value
+ %% Object is a DefinedObject, i.e. a #'Externaltypereference'
+ {_,ObjTDef} = get_referenced_type(S,Object),
+ TS = check_object(S,ObjTDef,ObjTDef#typedef.typespec),
+ {_,_,Components} = TS#'Object'.def,
+ case lists:keysearch(FieldName,1,Components) of
+ {value,{_,#valuedef{value=Val}}} ->
+ Val;
+ _ ->
+ error({value,"illegal value in constraint",S})
+ end;
+% resolv_value1(S,{'ValueFromObject',{po,Object,Params},FieldName}) ->
+% %% FieldName can hold either a fixed-type value or a variable-type value
+% %% Object is a ParameterizedObject
+resolv_value1(_,V) ->
+ V.
+
+resolv_value2(S,ModuleName,Name,Pos) ->
+ case asn1_db:dbget(ModuleName,Name) of
+ undefined ->
+ case imported(S,Name) of
+ {ok,Imodule} ->
+ {_,V2} = get_referenced(S,Imodule,Name,Pos),
+ V2#valuedef.value;
+ _ ->
+ throw({error,{asn1,{undefined_type_or_value,Name}}})
+ end;
+ Val ->
+ Val#valuedef.value
+ end.
+
+check_constraints(S,[{'ContainedSubtype',Type} | Rest], Acc) ->
+ {_,CTDef} = get_referenced_type(S,Type#type.def),
+ CType = check_type(S,S#state.tname,CTDef#typedef.typespec),
+ check_constraints(S,Rest,CType#type.constraint ++ Acc);
+check_constraints(S,[C | Rest], Acc) ->
+ check_constraints(S,Rest,[check_constraint(S,C) | Acc]);
+check_constraints(S,[],Acc) ->
+% io:format("Acc: ~p~n",[Acc]),
+ C = constraint_merge(S,lists:reverse(Acc)),
+% io:format("C: ~p~n",[C]),
+ lists:flatten(C).
+
+
+range_check(F={FixV,FixV}) ->
+% FixV;
+ F;
+range_check(VR={Lb,Ub}) when Lb < Ub ->
+ VR;
+range_check(Err={_,_}) ->
+ throw({error,{asn1,{illegal_size_constraint,Err}}});
+range_check(Value) ->
+ Value.
+
+check_constraint(S,Ext) when record(Ext,'Externaltypereference') ->
+ check_externaltypereference(S,Ext);
+
+
+check_constraint(S,{'SizeConstraint',{Lb,Ub}})
+ when list(Lb);tuple(Lb),size(Lb)==2 ->
+ case Lb of
+ #'Externalvaluereference'{} ->
+ check_constraint(S,{'SizeConstraint',{resolv_value(S,Lb),Ub}});
+ _ ->
+ NewLb = range_check(resolv_tuple_or_list(S,Lb)),
+ NewUb = range_check(resolv_tuple_or_list(S,Ub)),
+ {'SizeConstraint',{NewLb,NewUb}}
+ end;
+check_constraint(S,{'SizeConstraint',{Lb,Ub}}) ->
+ case {resolv_value(S,Lb),resolv_value(S,Ub)} of
+ {FixV,FixV} ->
+ {'SizeConstraint',FixV};
+ {Low,High} when Low < High ->
+ {'SizeConstraint',{Low,High}};
+ Err ->
+ throw({error,{asn1,{illegal_size_constraint,Err}}})
+ end;
+check_constraint(S,{'SizeConstraint',Lb}) ->
+ {'SizeConstraint',resolv_value(S,Lb)};
+
+check_constraint(S,{'SingleValue', L}) when list(L) ->
+ F = fun(A) -> resolv_value(S,A) end,
+ {'SingleValue',lists:map(F,L)};
+
+check_constraint(S,{'SingleValue', V}) when integer(V) ->
+ Val = resolv_value(S,V),
+%% [{'SingleValue',Val},{'ValueRange',{Val,Val}}]; % Why adding value range?
+ {'SingleValue',Val};
+check_constraint(S,{'SingleValue', V}) ->
+ {'SingleValue',resolv_value(S,V)};
+
+check_constraint(S,{'ValueRange', {Lb, Ub}}) ->
+ {'ValueRange',{resolv_value(S,Lb),resolv_value(S,Ub)}};
+
+%%check_constraint(S,{'ContainedSubtype',Type}) ->
+%% #typedef{typespec=TSpec} =
+%% check_type(S,S#state.tname,get_referenced_type(S,Type#type.def)),
+%% [C] = TSpec#type.constraint,
+%% C;
+
+check_constraint(S,{valueset,Type}) ->
+ {valueset,check_type(S,S#state.tname,Type)};
+
+check_constraint(S,{simpletable,Type}) ->
+ OSName = (Type#type.def)#'Externaltypereference'.type,
+ C = match_parameters(Type#type.def,S#state.parameters),
+ case C of
+ #'Externaltypereference'{} ->
+ Type#type{def=check_externaltypereference(S,C)},
+ {simpletable,OSName};
+ _ ->
+ check_type(S,S#state.tname,Type),
+ {simpletable,OSName}
+ end;
+
+check_constraint(S,{componentrelation,{objectset,Opos,Objset},Id}) ->
+ %% Objset is an 'Externaltypereference' record, since Objset is
+ %% a DefinedObjectSet.
+ RealObjset = match_parameters(Objset,S#state.parameters),
+ Ext = check_externaltypereference(S,RealObjset),
+ {componentrelation,{objectset,Opos,Ext},Id};
+
+check_constraint(S,Type) when record(Type,type) ->
+ #type{def=Def} = check_type(S,S#state.tname,Type),
+ Def;
+
+check_constraint(S,C) when list(C) ->
+ lists:map(fun(X)->check_constraint(S,X) end,C);
+% else keep the constraint unchanged
+check_constraint(_S,Any) ->
+% io:format("Constraint = ~p~n",[Any]),
+ Any.
+
+%% constraint_merge/2
+%% Computes the intersection of the outermost level of the constraint list
+%% (see the sketch after this function).
+%% See Dubuisson, second paragraph and footnote on page 285.
+%% If constraints with extensions are included in the combined constraints,
+%% the resulting combination will have the extension of the last constraint.
+%% Thus, there will be no extension if the last constraint is without
+%% extension. The root set of every constraint is considered in the
+%% "outermost intersection". See section 13.1.2 in Dubuisson.
+constraint_merge(_S,C=[H])when tuple(H) ->
+ C;
+constraint_merge(_S,[]) ->
+ [];
+constraint_merge(S,C) ->
+ %% skip all extension but the last
+ C1 = filter_extensions(C),
+ %% perform all internal level intersections, intersections first
+ %% since they have precedence over unions
+ C2 = lists:map(fun(X)when list(X)->constraint_intersection(S,X);
+ (X) -> X end,
+ C1),
+ %% perform all internal level unions
+ C3 = lists:map(fun(X)when list(X)->constraint_union(S,X);
+ (X) -> X end,
+ C2),
+
+ %% now get intersection of the outermost level
+ %% get the least common single value constraint
+ SVs = get_constraints(C3,'SingleValue'),
+ CombSV = intersection_of_sv(S,SVs),
+ %% get the least common value range constraint
+ VRs = get_constraints(C3,'ValueRange'),
+ CombVR = intersection_of_vr(S,VRs),
+ %% get the least common size constraint
+ SZs = get_constraints(C3,'SizeConstraint'),
+ CombSZ = intersection_of_size(S,SZs),
+ CminusSVs=ordsets:subtract(ordsets:from_list(C3),ordsets:from_list(SVs)),
+ % CminusSVsVRs = ordsets:subtract(ordsets:from_list(CminusSVs),
+% ordsets:from_list(VRs)),
+ RestC = ordsets:subtract(ordsets:from_list(CminusSVs),
+ ordsets:from_list(SZs)),
+    %% get the least common combined constraint. That is the union of each
+    %% deep constraint and the merge of single value and value range constraints
+ combine_constraints(S,CombSV,CombVR,CombSZ++RestC).
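+
+%% Rough illustration of the outer-level merge (a sketch, not a specification):
+%%   constraint_merge(S, [{'SingleValue',[1,2,3]}, {'ValueRange',{2,5}}])
+%% narrows the single-value set to the values that lie inside the range,
+%% yielding [{'SingleValue',[2,3]}, {'ValueRange',{2,5}}].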
+
+%% constraint_union(S,C) takes a list of constraints as input and
+%% merges them into a union. A union is performed when two
+%% constraints are found with the atom 'union' between them.
+%% The list may be nested. Fix that later !!!
+constraint_union(_S,[]) ->
+ [];
+constraint_union(_S,C=[_E]) ->
+ C;
+constraint_union(S,C) when list(C) ->
+ case lists:member(union,C) of
+ true ->
+ constraint_union1(S,C,[]);
+ _ ->
+ C
+ end;
+% SV = get_constraints(C,'SingleValue'),
+% SV1 = constraint_union_sv(S,SV),
+% VR = get_constraints(C,'ValueRange'),
+% VR1 = constraint_union_vr(VR),
+% RestC = ordsets:filter(fun({'SingleValue',_})->false;
+% ({'ValueRange',_})->false;
+% (_) -> true end,ordsets:from_list(C)),
+% SV1++VR1++RestC;
+constraint_union(_S,C) ->
+ [C].
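+
+%% Illustrative sketch of unions at this level:
+%%   constraint_union(S, [{'SingleValue',1}, union, {'SingleValue',3}])
+%%     -> [{'SingleValue',[1,3]}]
+%%   constraint_union(S, [{'SingleValue',6}, union, {'ValueRange',{1,5}}])
+%%     -> [{'ValueRange',{1,6}}]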
+
+constraint_union1(S,[A={'ValueRange',_},union,B={'ValueRange',_}|Rest],Acc) ->
+ AunionB = constraint_union_vr([A,B]),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[A={'SingleValue',_},union,B={'SingleValue',_}|Rest],Acc) ->
+ AunionB = constraint_union_sv(S,[A,B]),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[A={'SingleValue',_},union,B={'ValueRange',_}|Rest],Acc) ->
+ AunionB = union_sv_vr(S,A,B),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[A={'ValueRange',_},union,B={'SingleValue',_}|Rest],Acc) ->
+ AunionB = union_sv_vr(S,B,A),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[union|Rest],Acc) -> %skip when unsupported constraints
+ constraint_union1(S,Rest,Acc);
+constraint_union1(S,[A|Rest],Acc) ->
+ constraint_union1(S,Rest,[A|Acc]);
+constraint_union1(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+constraint_union_sv(_S,SV) ->
+ Values=lists:map(fun({_,V})->V end,SV),
+ case ordsets:from_list(Values) of
+ [] -> [];
+ [N] -> [{'SingleValue',N}];
+ L -> [{'SingleValue',L}]
+ end.
+
+%% REMOVE????
+%%constraint_union(S,VR,'ValueRange') ->
+%% constraint_union_vr(VR).
+
+%% constraint_union_vr(VR)
+%% VR = [{'ValueRange',{Lb,Ub}},...]
+%% Lb = 'MIN' | integer()
+%% Ub = 'MAX' | integer()
+%% Returns, if possible, a single ValueRange tuple whose range is the
+%% union of all ranges in VR.
+constraint_union_vr(VR) ->
+    %% Sort VR primarily by Lb and secondarily by Ub
+ Fun=fun({_,{'MIN',_B1}},{_,{A2,_B2}}) when integer(A2)->true;
+ ({_,{A1,_B1}},{_,{'MAX',_B2}}) when integer(A1) -> true;
+ ({_,{A1,_B1}},{_,{A2,_B2}}) when integer(A1),integer(A2),A1<A2 -> true;
+ ({_,{A,B1}},{_,{A,B2}}) when B1=<B2->true;
+ (_,_)->false end,
+ constraint_union_vr(lists:usort(Fun,VR),[]).
+
+constraint_union_vr([],Acc) ->
+ lists:reverse(Acc);
+constraint_union_vr([C|Rest],[]) ->
+ constraint_union_vr(Rest,[C]);
+constraint_union_vr([{_,{Lb,Ub2}}|Rest],[{_,{Lb,_Ub1}}|Acc]) -> %Ub2 > Ub1
+ constraint_union_vr(Rest,[{'ValueRange',{Lb,Ub2}}|Acc]);
+constraint_union_vr([{_,{_,Ub}}|Rest],A=[{_,{_,Ub}}|_Acc]) ->
+ constraint_union_vr(Rest,A);
+constraint_union_vr([{_,{Lb2,Ub2}}|Rest],[{_,{Lb1,Ub1}}|Acc]) when Lb2=<Ub1,
+ Ub2>Ub1->
+ constraint_union_vr(Rest,[{'ValueRange',{Lb1,Ub2}}|Acc]);
+constraint_union_vr([{_,{_,Ub2}}|Rest],A=[{_,{_,Ub1}}|_Acc]) when Ub2=<Ub1->
+ constraint_union_vr(Rest,A);
+constraint_union_vr([VR|Rest],Acc) ->
+ constraint_union_vr(Rest,[VR|Acc]).
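+
+%% Illustrative sketch: overlapping ranges are merged, e.g.
+%%   constraint_union_vr([{'ValueRange',{1,5}}, {'ValueRange',{4,10}}])
+%%     -> [{'ValueRange',{1,10}}]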
+
+union_sv_vr(_S,[],B) ->
+ [B];
+union_sv_vr(_S,A,[]) ->
+ [A];
+union_sv_vr(_S,C1={'SingleValue',SV},C2={'ValueRange',VR={Lb,Ub}})
+ when integer(SV) ->
+ case is_int_in_vr(SV,C2) of
+ true -> [C2];
+ _ ->
+ case VR of
+ {'MIN',Ub} when SV==Ub+1 -> [{'ValueRange',{'MIN',SV}}];
+ {Lb,'MAX'} when SV==Lb-1 -> [{'ValueRange',{SV,'MAX'}}];
+ {Lb,Ub} when SV==Ub+1 -> [{'ValueRange',{Lb,SV}}];
+ {Lb,Ub} when SV==Lb-1 -> [{'ValueRange',{SV,Ub}}];
+ _ ->
+ [C1,C2]
+ end
+ end;
+union_sv_vr(_S,C1={'SingleValue',SV},C2={'ValueRange',{_Lb,_Ub}})
+ when list(SV) ->
+ case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
+ [] -> [C2];
+ L ->
+ case expand_vr(L,C2) of
+ {[],C3} -> [C3];
+ {L,C2} -> [C1,C2];
+ {[Val],C3} -> [{'SingleValue',Val},C3];
+ {L2,C3} -> [{'SingleValue',L2},C3]
+ end
+ end.
+
+expand_vr(L,VR={_,{Lb,Ub}}) ->
+ case lower_Lb(L,Lb) of
+ false ->
+ case higher_Ub(L,Ub) of
+ false ->
+ {L,VR};
+ {L1,UbNew} ->
+ expand_vr(L1,{'ValueRange',{Lb,UbNew}})
+ end;
+ {L1,LbNew} ->
+ expand_vr(L1,{'ValueRange',{LbNew,Ub}})
+ end.
+
+lower_Lb(_,'MIN') ->
+ false;
+lower_Lb(L,Lb) ->
+ remove_val_from_list(Lb - 1,L).
+
+higher_Ub(_,'MAX') ->
+ false;
+higher_Ub(L,Ub) ->
+ remove_val_from_list(Ub + 1,L).
+
+remove_val_from_list(List,Val) ->
+ case lists:member(Val,List) of
+ true ->
+ {lists:delete(Val,List),Val};
+ false ->
+ false
+ end.
+
+%% get_constraints/2
+%% Arguments are a list of constraints, each of the format {Key,Value},
+%% and a constraint type.
+%% Returns a list containing only the constraints of the requested type;
+%% the list is empty if no such constraints were found.
+get_constraints(L=[{CType,_}],CType) ->
+ L;
+get_constraints(C,CType) ->
+ keysearch_allwithkey(CType,1,C).
+
+%% keysearch_allwithkey(Key,Ix,L)
+%% Types:
+%% Key = atom()
+%% Ix = integer()
+%% L = [tuple()|term()]
+%% Returns a list that contains all
+%% tuples from L that have Key as element Ix
+keysearch_allwithkey(Key,Ix,L) ->
+ lists:filter(fun(X) when tuple(X) ->
+ case element(Ix,X) of
+ Key -> true;
+ _ -> false
+ end;
+ (_) -> false
+ end, L).
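+
+%% Illustrative sketch of the two lookups above:
+%%   get_constraints([{'SingleValue',[1,2]}, {'ValueRange',{1,9}}], 'SingleValue')
+%%     -> [{'SingleValue',[1,2]}]
+%%   keysearch_allwithkey('ValueRange', 1,
+%%                        [{'SingleValue',[1,2]}, union, {'ValueRange',{1,9}}])
+%%     -> [{'ValueRange',{1,9}}]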
+
+
+%% filter_extensions(C)
+%% takes a list of constraints as input and
+%% returns a list containing the extension roots of all constraints;
+%% only the extension part of the last constraint is kept, and only
+%% if the last constraint has an extension.
+filter_extensions([]) ->
+ [];
+filter_extensions(C=[_H]) ->
+ C;
+filter_extensions(C) when list(C) ->
+ filter_extensions(C,[]).
+
+filter_extensions([C],Acc) ->
+ lists:reverse([C|Acc]);
+filter_extensions([{C,_E},H2|T],Acc) when tuple(C) ->
+ filter_extensions([H2|T],[C|Acc]);
+filter_extensions([{'SizeConstraint',{A,_B}},H2|T],Acc)
+ when list(A);tuple(A) ->
+ filter_extensions([H2|T],[{'SizeConstraint',A}|Acc]);
+filter_extensions([H1,H2|T],Acc) ->
+ filter_extensions([H2|T],[H1|Acc]).
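+
+%% Illustrative sketch (assuming an extensible constraint is represented as
+%% a {Root,Extension} pair, with Ext standing for any extension part):
+%%   filter_extensions([{{'SingleValue',[1,2]},Ext}, {'ValueRange',{0,10}}])
+%%     -> [{'SingleValue',[1,2]}, {'ValueRange',{0,10}}]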
+
+%% constraint_intersection(S,C) takes a list of constraints as input and
+%% performs intersections (see the example after c_intersect/3 below).
+%% An intersection is performed when the atom 'intersection' is found
+%% between two constraints.
+%% The list may be nested. Fix that later !!!
+constraint_intersection(_S,[]) ->
+ [];
+constraint_intersection(_S,C=[_E]) ->
+ C;
+constraint_intersection(S,C) when list(C) ->
+% io:format("constraint_intersection: ~p~n",[C]),
+ case lists:member(intersection,C) of
+ true ->
+ constraint_intersection1(S,C,[]);
+ _ ->
+ C
+ end;
+constraint_intersection(_S,C) ->
+ [C].
+
+constraint_intersection1(S,[A,intersection,B|Rest],Acc) ->
+ AisecB = c_intersect(S,A,B),
+ constraint_intersection1(S,Rest,AisecB++Acc);
+constraint_intersection1(S,[A|Rest],Acc) ->
+ constraint_intersection1(S,Rest,[A|Acc]);
+constraint_intersection1(_,[],Acc) ->
+ lists:reverse(Acc).
+
+c_intersect(S,C1={'SingleValue',_},C2={'SingleValue',_}) ->
+ intersection_of_sv(S,[C1,C2]);
+c_intersect(S,C1={'ValueRange',_},C2={'ValueRange',_}) ->
+ intersection_of_vr(S,[C1,C2]);
+c_intersect(S,C1={'ValueRange',_},C2={'SingleValue',_}) ->
+ intersection_sv_vr(S,[C2],[C1]);
+c_intersect(S,C1={'SingleValue',_},C2={'ValueRange',_}) ->
+ intersection_sv_vr(S,[C1],[C2]);
+c_intersect(_S,C1,C2) ->
+ [C1,C2].
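+
+%% Illustrative sketch:
+%%   constraint_intersection(S, [{'ValueRange',{1,10}}, intersection,
+%%                               {'ValueRange',{5,20}}])
+%%     -> [{'ValueRange',{5,10}}]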
+
+%% combine_constraints(S,SV,VR,CComb)
+%% Types:
+%% S = record(state,S)
+%% SV = [] | [SVC]
+%% VR = [] | [VRC]
+%% CComb = [] | [Lists]
+%% SVC = {'SingleValue',integer()} | {'SingleValue',[integer(),...]}
+%% VRC = {'ValueRange',{Lb,Ub}}
+%% Lists = List of lists containing any constraint combination
+%% Lb = 'MIN' | integer()
+%% Ub = 'MAX' | integer()
+%% Returns a combination of the least common constraint among SV,VR and all
+%% elements in CComb
+combine_constraints(_S,[],VR,CComb) ->
+ VR ++ CComb;
+% combine_combined_cnstr(S,VR,CComb);
+combine_constraints(_S,SV,[],CComb) ->
+ SV ++ CComb;
+% combine_combined_cnstr(S,SV,CComb);
+combine_constraints(S,SV,VR,CComb) ->
+ C=intersection_sv_vr(S,SV,VR),
+ C ++ CComb.
+% combine_combined_cnstr(S,C,CComb).
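+
+%% Illustrative sketch:
+%%   combine_constraints(S, [{'SingleValue',[1,2,3]}], [{'ValueRange',{2,5}}], [])
+%%     -> [{'SingleValue',[2,3]}]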
+
+intersection_sv_vr(_,[],_VR) ->
+ [];
+intersection_sv_vr(_,_SV,[]) ->
+ [];
+intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2={'ValueRange',{_Lb,_Ub}}])
+ when integer(SV) ->
+ case is_int_in_vr(SV,C2) of
+ true -> [C1];
+ _ -> %%error({type,{"asn1 illegal constraint",C1,C2},S})
+ throw({error,{"asn1 illegal constraint",C1,C2}})
+ end;
+intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2])
+ when list(SV) ->
+ case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
+ [] ->
+ %%error({type,{"asn1 illegal constraint",C1,C2},S});
+ throw({error,{"asn1 illegal constraint",C1,C2}});
+ [V] -> [{'SingleValue',V}];
+ L -> [{'SingleValue',L}]
+ end.
+
+
+
+intersection_of_size(_,[]) ->
+ [];
+intersection_of_size(_,C=[_SZ]) ->
+ C;
+intersection_of_size(S,[SZ,SZ|Rest]) ->
+ intersection_of_size(S,[SZ|Rest]);
+intersection_of_size(S,C=[C1={_,Int},{_,Range}|Rest])
+ when integer(Int),tuple(Range) ->
+ case Range of
+ {Lb,Ub} when Int >= Lb,
+ Int =< Ub ->
+ intersection_of_size(S,[C1|Rest]);
+ _ ->
+ throw({error,{asn1,{illegal_size_constraint,C}}})
+ end;
+intersection_of_size(S,[C1={_,Range},C2={_,Int}|Rest])
+ when integer(Int),tuple(Range) ->
+ intersection_of_size(S,[C2,C1|Rest]);
+intersection_of_size(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
+ Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
+ Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
+ intersection_of_size(S,[{'SizeConstraint',{Lb,Ub}}|Rest]);
+intersection_of_size(_,SZ) ->
+ throw({error,{asn1,{illegal_size_constraint,SZ}}}).
+
+intersection_of_vr(_,[]) ->
+ [];
+intersection_of_vr(_,VR=[_C]) ->
+ VR;
+intersection_of_vr(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
+ Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
+ Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
+ intersection_of_vr(S,[{'ValueRange',{Lb,Ub}}|Rest]);
+intersection_of_vr(_S,VR) ->
+ %%error({type,{asn1,{illegal_value_range_constraint,VR}},S});
+ throw({error,{asn1,{illegal_value_range_constraint,VR}}}).
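+
+%% Illustrative sketch of the intersections:
+%%   intersection_of_size(S, [{'SizeConstraint',4}, {'SizeConstraint',{0,10}}])
+%%     -> [{'SizeConstraint',4}]
+%%   intersection_of_vr(S, [{'ValueRange',{1,10}}, {'ValueRange',{5,'MAX'}}])
+%%     -> [{'ValueRange',{5,10}}]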
+
+intersection_of_sv(_,[]) ->
+ [];
+intersection_of_sv(_,SV=[_C]) ->
+ SV;
+intersection_of_sv(S,[SV,SV|Rest]) ->
+ intersection_of_sv(S,[SV|Rest]);
+intersection_of_sv(S,[{_,Int},{_,SV}|Rest]) when integer(Int),
+ list(SV) ->
+ SV2=intersection_of_sv1(S,Int,SV),
+ intersection_of_sv(S,[SV2|Rest]);
+intersection_of_sv(S,[{_,SV},{_,Int}|Rest]) when integer(Int),
+ list(SV) ->
+ SV2=intersection_of_sv1(S,Int,SV),
+ intersection_of_sv(S,[SV2|Rest]);
+intersection_of_sv(S,[{_,SV1},{_,SV2}|Rest]) when list(SV1),
+ list(SV2) ->
+ SV3=common_set(SV1,SV2),
+ intersection_of_sv(S,[SV3|Rest]);
+intersection_of_sv(_S,SV) ->
+ %%error({type,{asn1,{illegal_single_value_constraint,SV}},S}).
+ throw({error,{asn1,{illegal_single_value_constraint,SV}}}).
+
+intersection_of_sv1(_S,Int,SV) when integer(Int),list(SV) ->
+ case lists:member(Int,SV) of
+ true -> {'SingleValue',Int};
+ _ ->
+ %%error({type,{asn1,{illegal_single_value_constraint,Int,SV}},S})
+ throw({error,{asn1,{illegal_single_value_constraint,Int,SV}}})
+ end;
+intersection_of_sv1(_S,SV1,SV2) ->
+ %%error({type,{asn1,{illegal_single_value_constraint,SV1,SV2}},S}).
+ throw({error,{asn1,{illegal_single_value_constraint,SV1,SV2}}}).
+
+greatest_LB([H]) ->
+ H;
+greatest_LB(L) ->
+ greatest_LB1(lists:reverse(L)).
+greatest_LB1(['MIN',H2|_T])->
+ H2;
+greatest_LB1([H|_T]) ->
+ H.
+smallest_UB(L) ->
+ hd(L).
+
+common_set(SV1,SV2) ->
+ lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
+
+is_int_in_vr(Int,{_,{'MIN','MAX'}}) when integer(Int) ->
+ true;
+is_int_in_vr(Int,{_,{'MIN',Ub}}) when integer(Int),Int =< Ub ->
+ true;
+is_int_in_vr(Int,{_,{Lb,'MAX'}}) when integer(Int),Int >= Lb ->
+ true;
+is_int_in_vr(Int,{_,{Lb,Ub}}) when integer(Int),Int >= Lb,Int =< Ub ->
+ true;
+is_int_in_vr(_,_) ->
+ false.
+
+
+
+check_imported(_S,Imodule,Name) ->
+ case asn1_db:dbget(Imodule,'MODULE') of
+ undefined ->
+ io:format("~s.asn1db not found~n",[Imodule]),
+ io:format("Type ~s imported from non existing module ~s~n",[Name,Imodule]);
+ Im when record(Im,module) ->
+ case is_exported(Im,Name) of
+ false ->
+ io:format("Imported type ~s not exported from module ~s~n",[Name,Imodule]);
+ _ ->
+ ok
+ end
+ end,
+ ok.
+
+is_exported(Module,Name) when record(Module,module) ->
+ {exports,Exports} = Module#module.exports,
+ case Exports of
+ all ->
+ true;
+ [] ->
+ false;
+ L when list(L) ->
+ case lists:keysearch(Name,#'Externaltypereference'.type,Exports) of
+ false -> false;
+ _ -> true
+ end
+ end.
+
+
+
+check_externaltypereference(S,Etref=#'Externaltypereference'{module=Emod})->
+ Currmod = S#state.mname,
+ MergedMods = S#state.inputmodules,
+ case Emod of
+ Currmod ->
+ %% reference to current module or to imported reference
+ check_reference(S,Etref);
+ _ ->
+ %% io:format("Type ~s IMPORTED FROM ~s~n",[Etype,Emod]),
+ case lists:member(Emod,MergedMods) of
+ true ->
+ check_reference(S,Etref);
+ false ->
+ Etref
+ end
+ end.
+
+check_reference(S,#'Externaltypereference'{pos=Pos,module=Emod,type=Name}) ->
+ ModName = S#state.mname,
+ case asn1_db:dbget(ModName,Name) of
+ undefined ->
+ case imported(S,Name) of
+ {ok,Imodule} ->
+ check_imported(S,Imodule,Name),
+ #'Externaltypereference'{module=Imodule,type=Name};
+ _ ->
+ %may be a renamed type in multi file compiling!
+ {_,T}=renamed_reference(S,Name,Emod),
+ NewName = asn1ct:get_name_of_def(T),
+ NewPos = asn1ct:get_pos_of_def(T),
+ #'Externaltypereference'{pos=NewPos,
+ module=ModName,
+ type=NewName}
+ end;
+ _ ->
+ %% cannot do check_type here due to recursive definitions, like
+ %% S ::= SEQUENCE {a INTEGER, b S}. This implies that references
+ %% that appear before the definition will be an
+ %% Externaltypereference in the abstract syntax tree
+ #'Externaltypereference'{pos=Pos,module=ModName,type=Name}
+ end.
+
+
+name2Extref(_Mod,Name) when record(Name,'Externaltypereference') ->
+ Name;
+name2Extref(Mod,Name) ->
+ #'Externaltypereference'{module=Mod,type=Name}.
+
+get_referenced_type(S,Ext) when record(Ext,'Externaltypereference') ->
+ case match_parameters(Ext, S#state.parameters) of
+ Ext ->
+ #'Externaltypereference'{pos=Pos,module=Emod,type=Etype} = Ext,
+ case S#state.mname of
+ Emod -> % a local reference in this module
+ get_referenced1(S,Emod,Etype,Pos);
+ _ ->% always when multi file compiling
+ case lists:member(Emod,S#state.inputmodules) of
+ true ->
+ get_referenced1(S,Emod,Etype,Pos);
+ false ->
+ get_referenced(S,Emod,Etype,Pos)
+ end
+ end;
+ Other ->
+ {undefined,Other}
+ end;
+get_referenced_type(S=#state{mname=Emod},
+ ERef=#'Externalvaluereference'{pos=P,module=Emod,
+ value=Eval}) ->
+ case match_parameters(ERef,S#state.parameters) of
+ ERef ->
+ get_referenced1(S,Emod,Eval,P);
+ OtherERef when record(OtherERef,'Externalvaluereference') ->
+ get_referenced_type(S,OtherERef);
+ Value ->
+ {Emod,Value}
+ end;
+get_referenced_type(S,ERef=#'Externalvaluereference'{pos=Pos,module=Emod,
+ value=Eval}) ->
+ case match_parameters(ERef,S#state.parameters) of
+ ERef ->
+ case lists:member(Emod,S#state.inputmodules) of
+ true ->
+ get_referenced1(S,Emod,Eval,Pos);
+ false ->
+ get_referenced(S,Emod,Eval,Pos)
+ end;
+ OtherERef ->
+ get_referenced_type(S,OtherERef)
+ end;
+get_referenced_type(S,#identifier{val=Name,pos=Pos}) ->
+ get_referenced1(S,undefined,Name,Pos);
+get_referenced_type(_S,Type) ->
+ {undefined,Type}.
+
+%% get_referenced/4
+%% The referenced entity Ename may, in the case of an imported parameterized
+%% type, reference imported entities in the other module, which implies that
+%% asn1_db:dbget will fail even though the referenced entity exists. Thus
+%% Emod may be the module that imports the entity Ename rather than the one
+%% that holds the data about Ename.
+get_referenced(S,Emod,Ename,Pos) ->
+ case asn1_db:dbget(Emod,Ename) of
+ undefined ->
+ %% May be an imported entity in module Emod
+% throw({error,{asn1,{undefined_type_or_value,{Emod,Ename}}}});
+ NewS = S#state{module=asn1_db:dbget(Emod,'MODULE')},
+ get_imported(NewS,Ename,Emod,Pos);
+ T when record(T,typedef) ->
+ Spec = T#typedef.typespec,
+ case Spec#type.def of
+ Tref when record(Tref,typereference) ->
+ Def = #'Externaltypereference'{module=Emod,
+ type=Tref#typereference.val,
+ pos=Tref#typereference.pos},
+
+
+ {Emod,T#typedef{typespec=Spec#type{def=Def}}};
+ _ ->
+ {Emod,T} % should add check that T is exported here
+ end;
+ V -> {Emod,V}
+ end.
+
+get_referenced1(S,ModuleName,Name,Pos) ->
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ %% ModuleName may be other than S#state.mname when
+ %% multi file compiling is used.
+ get_imported(S,Name,ModuleName,Pos);
+ T ->
+ {S#state.mname,T}
+ end.
+
+get_imported(S,Name,Module,Pos) ->
+ case imported(S,Name) of
+ {ok,Imodule} ->
+ case asn1_db:dbget(Imodule,'MODULE') of
+ undefined ->
+ throw({error,{asn1,{module_not_found,Imodule}}});
+ Im when record(Im,module) ->
+ case is_exported(Im,Name) of
+ false ->
+ throw({error,
+ {asn1,{not_exported,{Im,Name}}}});
+ _ ->
+ get_referenced_type(S,
+ #'Externaltypereference'
+ {module=Imodule,
+ type=Name,pos=Pos})
+ end
+ end;
+ _ ->
+ renamed_reference(S,Name,Module)
+ end.
+
+renamed_reference(S,Name,Module) ->
+ %% first check if there is a renamed type in this module
+ %% second check if any type was imported with this name
+ case ets:info(renamed_defs) of
+ undefined -> throw({error,{asn1,{undefined_type,Name}}});
+ _ ->
+ case ets:match(renamed_defs,{'$1',Name,Module}) of
+ [] ->
+ case ets:info(original_imports) of
+ undefined ->
+ throw({error,{asn1,{undefined_type,Name}}});
+ _ ->
+ case ets:match(original_imports,{Module,'$1'}) of
+ [] ->
+ throw({error,{asn1,{undefined_type,Name}}});
+ [[ImportsList]] ->
+ case get_importmoduleoftype(ImportsList,Name) of
+ undefined ->
+ throw({error,{asn1,{undefined_type,Name}}});
+ NextMod ->
+ renamed_reference(S,Name,NextMod)
+ end
+ end
+ end;
+ [[NewTypeName]] ->
+ get_referenced1(S,Module,NewTypeName,undefined)
+ end
+ end.
+
+get_importmoduleoftype([I|Is],Name) ->
+ Index = #'Externaltypereference'.type,
+ case lists:keysearch(Name,Index,I#'SymbolsFromModule'.symbols) of
+ {value,_Ref} ->
+ (I#'SymbolsFromModule'.module)#'Externaltypereference'.type;
+ _ ->
+ get_importmoduleoftype(Is,Name)
+ end;
+get_importmoduleoftype([],_) ->
+ undefined.
+
+
+match_parameters(Name,[]) ->
+ Name;
+
+match_parameters(#'Externaltypereference'{type=Name},[{#'Externaltypereference'{type=Name},NewName}|_T]) ->
+ NewName;
+match_parameters(#'Externaltypereference'{type=Name},[{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+ NewName;
+% match_parameters(#'Externaltypereference'{type=Name},[{#typereference{val=Name},NewName}|T]) ->
+% NewName;
+% match_parameters(#'Externaltypereference'{type=Name},[{{_,#typereference{val=Name}},NewName}|T]) ->
+% NewName;
+%match_parameters(#typereference{val=Name},[{#typereference{val=Name},NewName}|T]) ->
+% NewName;
+match_parameters(#'Externalvaluereference'{value=Name},[{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
+ NewName;
+match_parameters(#'Externalvaluereference'{value=Name},[{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
+ NewName;
+% match_parameters(#identifier{val=Name},[{#identifier{val=Name},NewName}|T]) ->
+% NewName;
+% match_parameters(#identifier{val=Name},[{{_,#identifier{val=Name}},NewName}|T]) ->
+% NewName;
+match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},{valueset,#type{def=NewName}}}|_T]) ->
+ NewName;
+match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+ NewName;
+% match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+% [{{_,#typereference{val=Name}},{valueset,#type{def=NewName}}}|T]) ->
+% NewName;
+% match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+% [{{_,#typereference{val=Name}},NewName}|T]) ->
+% NewName;
+
+match_parameters(Name, [_H|T]) ->
+ %%io:format("match_parameters(~p,~p)~n",[Name,[H|T]]),
+ match_parameters(Name,T).
+
+imported(S,Name) ->
+ {imports,Ilist} = (S#state.module)#module.imports,
+ imported1(Name,Ilist).
+
+imported1(Name,
+ [#'SymbolsFromModule'{symbols=Symlist,
+ module=#'Externaltypereference'{type=ModuleName}}|T]) ->
+ case lists:keysearch(Name,#'Externaltypereference'.type,Symlist) of
+ {value,_V} ->
+ {ok,ModuleName};
+ _ ->
+ imported1(Name,T)
+ end;
+imported1(_Name,[]) ->
+ false.
+
+
+check_integer(_S,[],_C) ->
+ ok;
+check_integer(S,NamedNumberList,_C) ->
+ case check_unique(NamedNumberList,2) of
+ [] ->
+ check_int(S,NamedNumberList,[]);
+ L when list(L) ->
+ error({type,{duplicates,L},S}),
+ unchanged
+
+ end.
+
+check_int(S,[{'NamedNumber',Id,Num}|T],Acc) when integer(Num) ->
+ check_int(S,T,[{Id,Num}|Acc]);
+check_int(S,[{'NamedNumber',Id,{identifier,_,Name}}|T],Acc) ->
+ Val = dbget_ex(S,S#state.mname,Name),
+ check_int(S,[{'NamedNumber',Id,Val#valuedef.value}|T],Acc);
+check_int(_S,[],Acc) ->
+ lists:keysort(2,Acc).
+
+
+
+check_bitstring(_S,[],_Constr) ->
+ [];
+check_bitstring(S,NamedNumberList,_Constr) ->
+ case check_unique(NamedNumberList,2) of
+ [] ->
+ check_bitstr(S,NamedNumberList,[]);
+ L when list(L) ->
+ error({type,{duplicates,L},S}),
+ unchanged
+ end.
+
+check_bitstr(S,[{'NamedNumber',Id,Num}|T],Acc)when integer(Num) ->
+ check_bitstr(S,T,[{Id,Num}|Acc]);
+check_bitstr(S,[{'NamedNumber',Id,Name}|T],Acc) when atom(Name) ->
+%%check_bitstr(S,[{'NamedNumber',Id,{identifier,_,Name}}|T],Acc) ->
+%% io:format("asn1ct_check:check_bitstr/3 hej hop ~w~n",[Name]),
+ Val = dbget_ex(S,S#state.mname,Name),
+%% io:format("asn1ct_check:check_bitstr/3: ~w~n",[Val]),
+ check_bitstr(S,[{'NamedNumber',Id,Val#valuedef.value}|T],Acc);
+check_bitstr(S,[],Acc) ->
+ case check_unique(Acc,2) of
+ [] ->
+ lists:keysort(2,Acc);
+ L when list(L) ->
+ error({type,{duplicate_values,L},S}),
+ unchanged
+ end.
+
+%%check_bitstring(S,NamedNumberList,Constr) ->
+%% NamedNumberList.
+
+%% Check INSTANCE OF
+%% check that DefinedObjectClass is of TYPE-IDENTIFIER class
+%% If Constraint is empty make it the general INSTANCE OF type
+%% If Constraint is not empty make an inlined type
+%% convert INSTANCE OF to the associated type
+check_instance_of(S,DefinedObjectClass,Constraint) ->
+ check_type_identifier(S,DefinedObjectClass),
+ iof_associated_type(S,Constraint).
+
+
+check_type_identifier(_S,'TYPE-IDENTIFIER') ->
+ ok;
+check_type_identifier(S,Eref=#'Externaltypereference'{}) ->
+ case get_referenced_type(S,Eref) of
+ {_,#classdef{name='TYPE-IDENTIFIER'}} -> ok;
+ {_,TD=#typedef{typespec=#type{def=#'Externaltypereference'{}}}} ->
+ check_type_identifier(S,(TD#typedef.typespec)#type.def);
+ _ ->
+ error({type,{"object set in type INSTANCE OF "
+ "not of class TYPE-IDENTIFIER",Eref},S})
+ end.
+
+iof_associated_type(S,[]) ->
+ %% in this case encode/decode functions for INSTANCE OF must be
+ %% generated
+ case get(instance_of) of
+ undefined ->
+ AssociateSeq = iof_associated_type1(S,[]),
+ Tag =
+ case S#state.erule of
+ ber_bin_v2 ->
+ [?TAG_CONSTRUCTED(?N_INSTANCE_OF)];
+ _ -> []
+ end,
+ TypeDef=#typedef{checked=true,
+ name='INSTANCE OF',
+ typespec=#type{tag=Tag,
+ def=AssociateSeq}},
+ asn1_db:dbput(S#state.mname,'INSTANCE OF',TypeDef),
+ put(instance_of,generate);
+ _ ->
+ ok
+ end,
+ #'Externaltypereference'{module=S#state.mname,type='INSTANCE OF'};
+iof_associated_type(S,C) ->
+ iof_associated_type1(S,C).
+
+iof_associated_type1(S,C) ->
+ {TableCInf,Comp1Cnstr,Comp2Cnstr,Comp2tablecinf}=
+ instance_of_constraints(S,C),
+
+ ModuleName = S#state.mname,
+ Typefield_type=
+ case C of
+ [] -> 'ASN1_OPEN_TYPE';
+ _ -> {typefield,'Type'}
+ end,
+ {ObjIdTag,C1TypeTag}=
+ case S#state.erule of
+ ber_bin_v2 ->
+ {[{'UNIVERSAL',8}],
+ [#tag{class='UNIVERSAL',
+ number=6,
+ type='IMPLICIT',
+ form=0}]};
+ _ -> {[{'UNIVERSAL','INTEGER'}],[]}
+ end,
+ TypeIdentifierRef=#'Externaltypereference'{module=ModuleName,
+ type='TYPE-IDENTIFIER'},
+ ObjectIdentifier =
+ #'ObjectClassFieldType'{classname=TypeIdentifierRef,
+ class=[],
+ fieldname={id,[]},
+ type={fixedtypevaluefield,id,
+ #type{def='OBJECT IDENTIFIER'}}},
+ Typefield =
+ #'ObjectClassFieldType'{classname=TypeIdentifierRef,
+ class=[],
+ fieldname={'Type',[]},
+ type=Typefield_type},
+ IOFComponents =
+ [#'ComponentType'{name='type-id',
+ typespec=#type{tag=C1TypeTag,
+ def=ObjectIdentifier,
+ constraint=Comp1Cnstr},
+ prop=mandatory,
+ tags=ObjIdTag},
+ #'ComponentType'{name=value,
+ typespec=#type{tag=[#tag{class='CONTEXT',
+ number=0,
+ type='EXPLICIT',
+ form=32}],
+ def=Typefield,
+ constraint=Comp2Cnstr,
+ tablecinf=Comp2tablecinf},
+ prop=mandatory,
+ tags=[{'CONTEXT',0}]}],
+ #'SEQUENCE'{tablecinf=TableCInf,
+ components=IOFComponents}.
+
+
+%% returns the leading attribute, the constraint of the components and
+%% the tablecinf value for the second component.
+instance_of_constraints(_,[]) ->
+ {false,[],[],[]};
+instance_of_constraints(S,#constraint{c={simpletable,Type}}) ->
+ #type{def=#'Externaltypereference'{type=Name}} = Type,
+ ModuleName = S#state.mname,
+ ObjectSetRef=#'Externaltypereference'{module=ModuleName,
+ type=Name},
+ CRel=[{componentrelation,{objectset,
+ undefined, %% pos
+ ObjectSetRef},
+ [{innermost,
+ [#'Externalvaluereference'{module=ModuleName,
+ value=type}]}]}],
+ TableCInf=#simpletableattributes{objectsetname=Name,
+ c_name='type-id',
+ c_index=1,
+ usedclassfield=id,
+ uniqueclassfield=id,
+ valueindex=[]},
+ {TableCInf,[{simpletable,Name}],CRel,[{objfun,ObjectSetRef}]}.
+
+%% Check ENUMERATED
+%% ****************************************
+%% Check that all values are unique
+%% assign values to un-numbered identifiers
+%% check that the constraints are allowed and correct
+%% put the updated info back into database
+check_enumerated(_S,[{Name,Number}|Rest],_Constr) when atom(Name), integer(Number)->
+    %% already checked, just return the same list
+ [{Name,Number}|Rest];
+check_enumerated(S,NamedNumberList,_Constr) ->
+ check_enum(S,NamedNumberList,[],[]).
+
+%% identifiers are put in Acc2
+%% returns either [{Name,Number}] or {[{Name,Number}],[{ExtName,ExtNumber}]}
+%% the latter is returned if the ENUMERATION contains EXTENSIONMARK
+check_enum(S,[{'NamedNumber',Id,Num}|T],Acc1,Acc2) when integer(Num) ->
+ check_enum(S,T,[{Id,Num}|Acc1],Acc2);
+check_enum(S,[{'NamedNumber',Id,{identifier,_,Name}}|T],Acc1,Acc2) ->
+ Val = dbget_ex(S,S#state.mname,Name),
+ check_enum(S,[{'NamedNumber',Id,Val#valuedef.value}|T],Acc1,Acc2);
+check_enum(S,['EXTENSIONMARK'|T],Acc1,Acc2) ->
+ NewAcc2 = lists:keysort(2,Acc1),
+ NewList = enum_number(lists:reverse(Acc2),NewAcc2,0,[]),
+ { NewList, check_enum(S,T,[],[])};
+check_enum(S,[Id|T],Acc1,Acc2) when atom(Id) ->
+ check_enum(S,T,Acc1,[Id|Acc2]);
+check_enum(_S,[],Acc1,Acc2) ->
+ NewAcc2 = lists:keysort(2,Acc1),
+ enum_number(lists:reverse(Acc2),NewAcc2,0,[]).
+
+
+% Assign numbers to identifiers, counting from 0, but the numbers must
+% not collide with those already assigned to NamedNumbers.
+enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num > Cnt ->
+ enum_number(T,[{Id,Num}|T2],Cnt+1,[{H,Cnt}|Acc]);
+enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num < Cnt -> % negative Num
+ enum_number(T,T2,Cnt+1,[{H,Cnt},{Id,Num}|Acc]);
+enum_number([],L2,_Cnt,Acc) ->
+ lists:concat([lists:reverse(Acc),L2]);
+enum_number(L,[{Id,Num}|T2],Cnt,Acc) -> % Num == Cnt
+ enum_number(L,T2,Cnt+1,[{Id,Num}|Acc]);
+enum_number([H|T],[],Cnt,Acc) ->
+ enum_number(T,[],Cnt+1,[{H,Cnt}|Acc]).
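+
+%% Illustration of enum_number/4 (sample identifiers made up): with one
+%% explicitly numbered identifier x = 1, the un-numbered identifiers get
+%% the remaining values counting from 0:
+%%   enum_number([a,b], [{x,1}], 0, []) -> [{a,0},{x,1},{b,2}]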
+
+
+check_boolean(_S,_Constr) ->
+ ok.
+
+check_octetstring(_S,_Constr) ->
+ ok.
+
+% check all aspects of a SEQUENCE
+% - that all component names are unique
+% - that all TAGS are ok (when TAG default is applied)
+% - that each component is of a valid type
+% - that the extension marks are valid
+
+check_sequence(S,Type,Comps) ->
+ Components = expand_components(S,Comps),
+ case check_unique([C||C <- Components ,record(C,'ComponentType')]
+ ,#'ComponentType'.name) of
+ [] ->
+ %% sort_canonical(Components),
+ Components2 = maybe_automatic_tags(S,Components),
+ %% check the table constraints from here. The outermost type
+ %% is Type, the innermost is Comps (the list of components)
+ NewComps =
+ case check_each_component(S,Type,Components2) of
+ NewComponents when list(NewComponents) ->
+ check_unique_sequence_tags(S,NewComponents),
+ NewComponents;
+ Ret = {NewComponents,NewEcomps} ->
+ TagComps = NewComponents ++
+ [Comp#'ComponentType'{prop='OPTIONAL'}|| Comp <- NewEcomps],
+ %% extension components are like optionals when it comes to tagging
+ check_unique_sequence_tags(S,TagComps),
+ Ret
+ end,
+ %% CRelInf is the "leading attribute" information
+            %% necessary for code generation of the lookup in the
+ %% object set table,
+ %% i.e. getenc_ObjectSet/getdec_ObjectSet.
+ %% {objfun,ERef} tuple added in NewComps2 in tablecinf
+ %% field in type record of component relation constrained
+ %% type
+% io:format("NewComps: ~p~n",[NewComps]),
+ {CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps),
+% io:format("CRelInf: ~p~n",[CRelInf]),
+% io:format("NewComps2: ~p~n",[NewComps2]),
+            %% CompListWithTblInf has had a lot of unnecessary info about
+            %% the involved class removed, such as the class of the object
+            %% set.
+ CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2),
+% io:format("CompListWithTblInf: ~p~n",[CompListWithTblInf]),
+ {CRelInf,CompListWithTblInf};
+ Dupl ->
+ throw({error,{asn1,{duplicate_components,Dupl}}})
+ end.
+
+expand_components(S, [{'COMPONENTS OF',Type}|T]) ->
+ CompList =
+ case get_referenced_type(S,Type#type.def) of
+ {_,#typedef{typespec=#type{def=Seq}}} when record(Seq,'SEQUENCE') ->
+ case Seq#'SEQUENCE'.components of
+ {Root,_Ext} -> Root;
+ Root -> Root
+ end;
+ Err -> throw({error,{asn1,{illegal_COMPONENTS_OF,Err}}})
+ end,
+ expand_components(S,CompList) ++ expand_components(S,T);
+expand_components(S,[H|T]) ->
+ [H|expand_components(S,T)];
+expand_components(_,[]) ->
+ [].
+
+check_unique_sequence_tags(S,[#'ComponentType'{prop=mandatory}|Rest]) ->
+ check_unique_sequence_tags(S,Rest);
+check_unique_sequence_tags(S,[C|Rest]) when record(C,'ComponentType') ->
+ check_unique_sequence_tags1(S,Rest,[C]);% optional or default
+check_unique_sequence_tags(S,[_ExtensionMarker|Rest]) ->
+ check_unique_sequence_tags(S,Rest);
+check_unique_sequence_tags(_S,[]) ->
+ true.
+
+check_unique_sequence_tags1(S,[C|Rest],Acc) when record(C,'ComponentType') ->
+ case C#'ComponentType'.prop of
+ mandatory ->
+ check_unique_tags(S,lists:reverse([C|Acc])),
+ check_unique_sequence_tags(S,Rest);
+ _ ->
+ check_unique_sequence_tags1(S,Rest,[C|Acc]) % default or optional
+ end;
+check_unique_sequence_tags1(S,[H|Rest],Acc) ->
+ check_unique_sequence_tags1(S,Rest,[H|Acc]);
+check_unique_sequence_tags1(S,[],Acc) ->
+ check_unique_tags(S,lists:reverse(Acc)).
+
+check_sequenceof(S,Type,Component) when record(Component,type) ->
+ check_type(S,Type,Component).
+
+check_set(S,Type,Components) ->
+ {TableCInf,NewComponents} = check_sequence(S,Type,Components),
+ case lists:member(der,S#state.options) of
+ true when S#state.erule == ber;
+ S#state.erule == ber_bin ->
+ {Sorted,SortedComponents} =
+ sort_components(S#state.tname,
+ (S#state.module)#module.tagdefault,
+ NewComponents),
+ {Sorted,TableCInf,SortedComponents};
+ _ ->
+ {false,TableCInf,NewComponents}
+ end.
+
+sort_components(_TypeName,'AUTOMATIC',Components) ->
+ {true,Components};
+sort_components(TypeName,_TagDefault,Components) ->
+ case untagged_choice(Components) of
+ false ->
+ {true,sort_components1(TypeName,Components,[],[],[],[])};
+ true ->
+ {dynamic,Components} % sort in run-time
+ end.
+
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'UNIVERSAL',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,[C|UnivAcc],ApplAcc,ContAcc,PrivAcc);
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'APPLICATION',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,UnivAcc,[C|ApplAcc],ContAcc,PrivAcc);
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'CONTEXT',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,UnivAcc,ApplAcc,[C|ContAcc],PrivAcc);
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'PRIVATE',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,UnivAcc,ApplAcc,ContAcc,[C|PrivAcc]);
+sort_components1(TypeName,[],UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ I = #'ComponentType'.tags,
+ ascending_order_check(TypeName,sort_universal_type(UnivAcc)) ++
+ ascending_order_check(TypeName,lists:keysort(I,ApplAcc)) ++
+ ascending_order_check(TypeName,lists:keysort(I,ContAcc)) ++
+ ascending_order_check(TypeName,lists:keysort(I,PrivAcc)).
+
+ascending_order_check(TypeName,Components) ->
+ ascending_order_check1(TypeName,Components),
+ Components.
+
+ascending_order_check1(TypeName,
+ [C1 = #'ComponentType'{tags=[{_,T}|_]},
+ C2 = #'ComponentType'{tags=[{_,T}|_]}|Rest]) ->
+ io:format("WARNING: Indistinct tag ~p in SET ~p, components ~p and ~p~n",
+ [T,TypeName,C1#'ComponentType'.name,C2#'ComponentType'.name]),
+ ascending_order_check1(TypeName,[C2|Rest]);
+ascending_order_check1(TypeName,
+ [C1 = #'ComponentType'{tags=[{'UNIVERSAL',T1}|_]},
+ C2 = #'ComponentType'{tags=[{'UNIVERSAL',T2}|_]}|Rest]) ->
+ case (asn1ct_gen_ber:decode_type(T1) == asn1ct_gen_ber:decode_type(T2)) of
+ true ->
+ io:format("WARNING: Indistinct tags ~p and ~p in"
+ " SET ~p, components ~p and ~p~n",
+ [T1,T2,TypeName,C1#'ComponentType'.name,
+ C2#'ComponentType'.name]),
+ ascending_order_check1(TypeName,[C2|Rest]);
+ _ ->
+ ascending_order_check1(TypeName,[C2|Rest])
+ end;
+ascending_order_check1(N,[_|Rest]) ->
+ ascending_order_check1(N,Rest);
+ascending_order_check1(_,[_]) ->
+ ok;
+ascending_order_check1(_,[]) ->
+ ok.
+
+sort_universal_type(Components) ->
+ List = lists:map(fun(C) ->
+ #'ComponentType'{tags=[{_,T}|_]} = C,
+ {asn1ct_gen_ber:decode_type(T),C}
+ end,
+ Components),
+ SortedList = lists:keysort(1,List),
+ lists:map(fun(X)->element(2,X) end,SortedList).
+
+untagged_choice([#'ComponentType'{typespec=#type{tag=[],def={'CHOICE',_}}}|_Rest]) ->
+ true;
+untagged_choice([_|Rest]) ->
+ untagged_choice(Rest);
+untagged_choice([]) ->
+ false.
+
+check_setof(S,Type,Component) when record(Component,type) ->
+ check_type(S,Type,Component).
+
+check_restrictedstring(_S,_Def,_Constr) ->
+ ok.
+
+check_objectidentifier(_S,_Constr) ->
+ ok.
+
+% check all aspects of a CHOICE
+% - that all alternative names are unique
+% - that all TAGS are ok (when TAG default is applied)
+% - that each alternative is of a valid type
+% - that the extension marks are valid
+check_choice(S,Type,Components) when list(Components) ->
+ case check_unique([C||C <- Components,
+ record(C,'ComponentType')],#'ComponentType'.name) of
+ [] ->
+ %% sort_canonical(Components),
+ Components2 = maybe_automatic_tags(S,Components),
+ %NewComps =
+ case check_each_alternative(S,Type,Components2) of
+ {NewComponents,NewEcomps} ->
+ check_unique_tags(S,NewComponents ++ NewEcomps),
+ {NewComponents,NewEcomps};
+ NewComponents ->
+ check_unique_tags(S,NewComponents),
+ NewComponents
+ end;
+%% CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps);
+ Dupl ->
+ throw({error,{asn1,{duplicate_choice_alternatives,Dupl}}})
+ end;
+check_choice(_S,_,[]) ->
+ [].
+
+%% probably dead code that should be removed
+%%maybe_automatic_tags(S,{Rc,Ec}) ->
+%% {maybe_automatic_tags1(S,Rc,0),maybe_automatic_tags1(S,Ec,length(Rc))};
+maybe_automatic_tags(#state{erule=per},C) ->
+ C;
+maybe_automatic_tags(#state{erule=per_bin},C) ->
+ C;
+maybe_automatic_tags(S,C) ->
+ maybe_automatic_tags1(S,C,0).
+
+maybe_automatic_tags1(S,C,TagNo) ->
+ case (S#state.module)#module.tagdefault of
+ 'AUTOMATIC' ->
+ generate_automatic_tags(S,C,TagNo);
+ _ ->
+            %% The module may be part of a multi-file compilation where
+            %% only some of the modules have the default tag AUTOMATIC
+            %% TAGS; the names of the types in those modules are saved
+            %% in the table automatic_tags
+ Name= S#state.tname,
+ case is_automatic_tagged_in_multi_file(Name) of
+ true ->
+ generate_automatic_tags(S,C,TagNo);
+ false ->
+ C
+ end
+ end.
+
+is_automatic_tagged_in_multi_file(Name) ->
+ case ets:info(automatic_tags) of
+ undefined ->
+ %% this case when not multifile compilation
+ false;
+ _ ->
+ case ets:member(automatic_tags,Name) of
+ true ->
+ true;
+ _ ->
+ false
+ end
+ end.
+
+generate_automatic_tags(_S,C,TagNo) ->
+ case any_manual_tag(C) of
+ true ->
+ C;
+ false ->
+ generate_automatic_tags1(C,TagNo)
+ end.
+
+generate_automatic_tags1([H|T],TagNo) when record(H,'ComponentType') ->
+ #'ComponentType'{typespec=Ts} = H,
+ NewTs = Ts#type{tag=[#tag{class='CONTEXT',
+ number=TagNo,
+ type={default,'IMPLICIT'},
+ form= 0 }]}, % PRIMITIVE
+ [H#'ComponentType'{typespec=NewTs}|generate_automatic_tags1(T,TagNo+1)];
+generate_automatic_tags1([ExtMark|T],TagNo) -> % EXTENSIONMARK
+ [ExtMark | generate_automatic_tags1(T,TagNo)];
+generate_automatic_tags1([],_) ->
+ [].
+
+any_manual_tag([#'ComponentType'{typespec=#type{tag=[]}}|Rest]) ->
+ any_manual_tag(Rest);
+any_manual_tag([{'EXTENSIONMARK',_,_}|Rest]) ->
+ any_manual_tag(Rest);
+any_manual_tag([_|_Rest]) ->
+ true;
+any_manual_tag([]) ->
+ false.
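+
+%% Illustration of any_manual_tag/1 (hypothetical components, relying on
+%% the record defaults for the fields that are not shown): a component
+%% whose typespec has an empty tag list does not count as manually
+%% tagged, so
+%%   any_manual_tag([#'ComponentType'{typespec=#type{tag=[]}}])       -> false
+%%   any_manual_tag([#'ComponentType'{typespec=#type{tag=[#tag{}]}}]) -> true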
+
+
+check_unique_tags(S,C) ->
+ case (S#state.module)#module.tagdefault of
+ 'AUTOMATIC' ->
+ case any_manual_tag(C) of
+ false -> true;
+ _ -> collect_and_sort_tags(C,[])
+ end;
+ _ ->
+ collect_and_sort_tags(C,[])
+ end.
+
+collect_and_sort_tags([C|Rest],Acc) when record(C,'ComponentType') ->
+ collect_and_sort_tags(Rest,C#'ComponentType'.tags ++ Acc);
+collect_and_sort_tags([_|Rest],Acc) ->
+ collect_and_sort_tags(Rest,Acc);
+collect_and_sort_tags([],Acc) ->
+ {Dupl,_}= lists:mapfoldl(fun(El,El)->{{dup,El},El};(El,_Prev)-> {El,El} end,notag,lists:sort(Acc)),
+ Dupl2 = [Dup|| {dup,Dup} <- Dupl],
+ if
+ length(Dupl2) > 0 ->
+ throw({error,{asn1,{duplicates_of_the_tags,Dupl2}}});
+ true ->
+ true
+ end.
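+
+%% The mapfoldl above marks every tag that equals its predecessor in the
+%% sorted list as {dup,Tag}. With the made-up accumulator
+%% Acc = [{'CONTEXT',0},{'CONTEXT',0},{'CONTEXT',1}] the marked list is
+%% [{'CONTEXT',0},{dup,{'CONTEXT',0}},{'CONTEXT',1}], so the function
+%% throws {error,{asn1,{duplicates_of_the_tags,[{'CONTEXT',0}]}}}.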
+
+check_unique(L,Pos) ->
+ Slist = lists:keysort(Pos,L),
+ check_unique2(Slist,Pos,[]).
+
+check_unique2([A,B|T],Pos,Acc) when element(Pos,A) == element(Pos,B) ->
+ check_unique2([B|T],Pos,[element(Pos,B)|Acc]);
+check_unique2([_|T],Pos,Acc) ->
+ check_unique2(T,Pos,Acc);
+check_unique2([],_,Acc) ->
+ lists:reverse(Acc).
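+
+%% check_unique/2 returns the duplicated key values (element Pos of each
+%% tuple), or [] when all keys are unique. With made-up data:
+%%   check_unique([{a,1},{b,1},{c,2}], 2) -> [1]
+%%   check_unique([{a,1},{b,2}], 2)       -> []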
+
+check_each_component(S,Type,{Rlist,ExtList}) ->
+ {check_each_component(S,Type,Rlist),
+ check_each_component(S,Type,ExtList)};
+check_each_component(S,Type,Components) ->
+ check_each_component(S,Type,Components,[],[],noext).
+
+check_each_component(S = #state{abscomppath=Path,recordtopname=TopName},Type,
+ [C|Ct],Acc,Extacc,Ext) when record(C,'ComponentType') ->
+ #'ComponentType'{name=Cname,typespec=Ts,prop=Prop} = C,
+ NewAbsCPath =
+ case Ts#type.def of
+ #'Externaltypereference'{} -> [];
+ _ -> [Cname|Path]
+ end,
+ CheckedTs = check_type(S#state{abscomppath=NewAbsCPath,
+ recordtopname=[Cname|TopName]},Type,Ts),
+ NewTags = get_taglist(S,CheckedTs),
+
+ NewProp =
+% case lists:member(der,S#state.options) of
+% true ->
+% True ->
+ case normalize_value(S,CheckedTs,Prop,[Cname|TopName]) of
+ mandatory -> mandatory;
+ 'OPTIONAL' -> 'OPTIONAL';
+ DefaultValue -> {'DEFAULT',DefaultValue}
+ end,
+% _ ->
+% Prop
+% end,
+ NewC = C#'ComponentType'{typespec=CheckedTs,prop=NewProp,tags=NewTags},
+ case Ext of
+ noext ->
+ check_each_component(S,Type,Ct,[NewC|Acc],Extacc,Ext);
+ ext ->
+ check_each_component(S,Type,Ct,Acc,[NewC|Extacc],Ext)
+ end;
+check_each_component(S,Type,[_|Ct],Acc,Extacc,noext) -> % skip 'EXTENSIONMARK'
+ check_each_component(S,Type,Ct,Acc,Extacc,ext);
+check_each_component(_S,_,[_C|_Ct],_,_,ext) -> % skip 'EXTENSIONMARK'
+ throw({error,{asn1,{too_many_extension_marks}}});
+check_each_component(_S,_,[],Acc,Extacc,ext) ->
+ {lists:reverse(Acc),lists:reverse(Extacc)};
+check_each_component(_S,_,[],Acc,_,noext) ->
+ lists:reverse(Acc).
+
+check_each_alternative(S,Type,{Rlist,ExtList}) ->
+ {check_each_alternative(S,Type,Rlist),
+ check_each_alternative(S,Type,ExtList)};
+check_each_alternative(S,Type,[C|Ct]) ->
+ check_each_alternative(S,Type,[C|Ct],[],[],noext).
+
+check_each_alternative(S=#state{abscomppath=Path,recordtopname=TopName},Type,[C|Ct],
+ Acc,Extacc,Ext) when record(C,'ComponentType') ->
+ #'ComponentType'{name=Cname,typespec=Ts,prop=_Prop} = C,
+ NewAbsCPath =
+ case Ts#type.def of
+ #'Externaltypereference'{} -> [];
+ _ -> [Cname|Path]
+ end,
+ NewState =
+ S#state{abscomppath=NewAbsCPath,recordtopname=[Cname|TopName]},
+ CheckedTs = check_type(NewState,Type,Ts),
+ NewTags = get_taglist(S,CheckedTs),
+ NewC = C#'ComponentType'{typespec=CheckedTs,tags=NewTags},
+ case Ext of
+ noext ->
+ check_each_alternative(S,Type,Ct,[NewC|Acc],Extacc,Ext);
+ ext ->
+ check_each_alternative(S,Type,Ct,Acc,[NewC|Extacc],Ext)
+ end;
+
+check_each_alternative(S,Type,[_|Ct],Acc,Extacc,noext) -> % skip 'EXTENSIONMARK'
+ check_each_alternative(S,Type,Ct,Acc,Extacc,ext);
+check_each_alternative(_S,_,[_C|_Ct],_,_,ext) -> % skip 'EXTENSIONMARK'
+ throw({error,{asn1,{too_many_extension_marks}}});
+check_each_alternative(_S,_,[],Acc,Extacc,ext) ->
+ {lists:reverse(Acc),lists:reverse(Extacc)};
+check_each_alternative(_S,_,[],Acc,_,noext) ->
+ lists:reverse(Acc).
+
+%% componentrelation_leadingattr/2 searches the structure for table
+%% constraints, if any is found componentrelation_leadingattr/5 is
+%% called.
+componentrelation_leadingattr(S,CompList) ->
+% {Cs1,Cs2} =
+ Cs =
+ case CompList of
+ {Components,EComponents} when list(Components) ->
+% {Components,Components};
+ Components ++ EComponents;
+ CompList when list(CompList) ->
+% {CompList,CompList}
+ CompList
+ end,
+% case any_simple_table(S,Cs1,[]) of
+
+ %% get_simple_table_if_used/2 should find out whether there are any
+ %% component relation constraints in the entire tree of Cs1 that
+    %% relate to this level. It returns information about the simple
+    %% table constraint necessary for the call to
+    %% componentrelation_leadingattr/6, the step where the leading
+    %% attribute and the syntax tree are modified to support the code
+    %% generation.
+ case get_simple_table_if_used(S,Cs) of
+ [] -> {false,CompList};
+ STList ->
+% componentrelation_leadingattr(S,Cs1,Cs2,STList,[],[])
+ componentrelation_leadingattr(S,Cs,Cs,STList,[],[])
+ end.
+
+%% componentrelation_leadingattr/6: when all components have been
+%% searched, the new modified components are returned together with the
+%% "leading attribute" information, which is later stored in the
+%% tablecinf field of the SEQUENCE/SET record. The "leading attribute"
+%% information is used to generate the lookup in the object set
+%% table. The other information gathered in the #type.tablecinf field
+%% is also used in the code generation phase, to recognize the proper
+%% components for "open type" encoding and to propagate the result of
+%% the object set lookup when needed.
+componentrelation_leadingattr(_,[],_CompList,_,[],NewCompList) ->
+ {false,lists:reverse(NewCompList)};
+componentrelation_leadingattr(_,[],_CompList,_,LeadingAttr,NewCompList) ->
+ {lists:last(LeadingAttr),lists:reverse(NewCompList)}; %send all info in Ts later
+componentrelation_leadingattr(S,[C|Cs],CompList,STList,Acc,CompAcc) ->
+ {LAAcc,NewC} =
+ case catch componentrelation1(S,C#'ComponentType'.typespec,
+ [C#'ComponentType'.name]) of
+ {'EXIT',_} ->
+ {[],C};
+ {CRI=[{_A1,_B1,_C1,_D1}|_Rest],NewTSpec} ->
+ %% {ObjectSet,AtPath,ClassDef,Path}
+ %% _A1 is a reference to the object set of the
+ %% component relation constraint.
+ %% _B1 is the path of names in the at-list of the
+ %% component relation constraint.
+ %% _C1 is the class definition of the
+ %% ObjectClassFieldType.
+ %% _D1 is the path of components that was traversed to
+ %% find this constraint.
+ case leading_attr_index(S,CompList,CRI,
+ lists:reverse(S#state.abscomppath),[]) of
+ [] ->
+ {[],C};
+ [{ObjSet,Attr,N,ClassDef,_Path,ValueIndex}|_NewRest] ->
+ OS = object_set_mod_name(S,ObjSet),
+ UniqueFieldName =
+ case (catch get_unique_fieldname(#classdef{typespec=ClassDef})) of
+ {error,'__undefined_'} ->
+ no_unique;
+ {asn1,Msg,_} ->
+ error({type,Msg,S});
+ Other -> Other
+ end,
+% UsedFieldName = get_used_fieldname(S,Attr,STList),
+ %% Res should be done differently: even though
+ %% a unique field name exists it is not
+ %% certain that the ObjectClassFieldType of
+ %% the simple table constraint picks that
+ %% class field.
+ Res = #simpletableattributes{objectsetname=OS,
+%% c_name=asn1ct_gen:un_hyphen_var(Attr),
+ c_name=Attr,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex},
+ {[Res],C#'ComponentType'{typespec=NewTSpec}}
+ end;
+ _ ->
+ %% no constraint was found
+ {[],C}
+ end,
+ componentrelation_leadingattr(S,Cs,CompList,STList,LAAcc++Acc,
+ [NewC|CompAcc]).
+
+object_set_mod_name(_S,ObjSet) when atom(ObjSet) ->
+ ObjSet;
+object_set_mod_name(#state{mname=M},
+ #'Externaltypereference'{module=M,type=T}) ->
+ T;
+object_set_mod_name(S,#'Externaltypereference'{module=M,type=T}) ->
+ case lists:member(M,S#state.inputmodules) of
+ true ->
+ T;
+ false ->
+ {M,T}
+ end.
+
+%% get_used_fieldname gets the used field of the class referenced by
+%% the ObjectClassFieldType construct in the simple table constraint
+%% corresponding to the component relation constraint that depends on
+%% it.
+% get_used_fieldname(_S,CName,[{[CName|_Rest],_,ClFieldName}|_RestSimpleT]) ->
+% ClFieldName;
+% get_used_fieldname(S,CName,[_SimpleTC|Rest]) ->
+% get_used_fieldname(S,CName,Rest);
+% get_used_fieldname(S,_,[]) ->
+% error({type,"Error in Simple table constraint",S}).
+
+%% any_simple_table/3 checks if any of the components on this level is
+%% constrained by a simple table constraint. It returns a list of
+%% tuples with three elements. It is a name path to the place in the
+%% type structure where the constraint is, and the name of the object
+%% set and the referenced field in the class.
+% any_simple_table(S = #state{mname=M,abscomppath=Path},
+% [#'ComponentType'{name=Name,typespec=Type}|Cs],Acc) ->
+% Constraint = Type#type.constraint,
+% case lists:keysearch(simpletable,1,Constraint) of
+% {value,{_,#type{def=Ref}}} ->
+% %% This ObjectClassFieldType, which has a simple table
+% %% constraint, must pick a fixed type value, mustn't it ?
+% {ClassDef,[{_,ClassFieldName}]} = Type#type.def,
+% ST =
+% case Ref of
+% #'Externaltypereference'{module=M,type=ObjSetName} ->
+% {[Name|Path],ObjSetName,ClassFieldName};
+% _ ->
+% {[Name|Path],Ref,ClassFieldName}
+% end,
+% any_simple_table(S,Cs,[ST|Acc]);
+% false ->
+% any_simple_table(S,Cs,Acc)
+% end;
+% any_simple_table(_,[],Acc) ->
+% lists:reverse(Acc);
+% any_simple_table(S,[_|Cs],Acc) ->
+% any_simple_table(S,Cs,Acc).
+
+%% get_simple_table_if_used/2 searches the structure of Cs for any
+%% component relation constraints due to the present level of the
+%% structure. If there are any, the necessary information for code
+%% generation of the lookup functionality in the object set table is
+%% returned.
+get_simple_table_if_used(S,Cs) ->
+ CNames = lists:map(fun(#'ComponentType'{name=Name}) -> Name;
+ (_) -> [] %% in case of extension marks
+ end,
+ Cs),
+ RefedSimpleTable=any_component_relation(S,Cs,CNames,[],[]),
+ get_simple_table_info(S,Cs,remove_doubles(RefedSimpleTable)).
+
+remove_doubles(L) ->
+ remove_doubles(L,[]).
+remove_doubles([H|T],Acc) ->
+ NewT = remove_doubles1(H,T),
+ remove_doubles(NewT,[H|Acc]);
+remove_doubles([],Acc) ->
+ Acc.
+
+remove_doubles1(El,L) ->
+ case lists:delete(El,L) of
+ L -> L;
+ NewL -> remove_doubles1(El,NewL)
+ end.
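+
+%% Illustration with made-up atoms: remove_doubles/1 keeps one copy of
+%% each element, in reverse order of first occurrence:
+%%   remove_doubles([a,b,a,c,b,a]) -> [c,b,a]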
+
+%% get_simple_table_info searches the components Cs by the path from
+%% an at-list (third argument), and follows into a component of it if
+%% necessary, to get information needed for code generation.
+%%
+%% Returns a list of tuples with three elements: a list of atoms that
+%% is the path, the name of the class field that is referred to in the
+%% ObjectClassFieldType, and the name of the unique field of the class
+%% of the ObjectClassFieldType.
+%%
+% %% The level information outermost/innermost must be kept. There are
+% %% at least two possibilities to cover here for an outermost case: 1)
+% %% Both the simple table and the component relation have a common path
+% %% at least one step below the outermost level, i.e. the leading
+% %% information shall be on a sub level. 2) They don't have any common
+% %% path.
+get_simple_table_info(S,Cs,[AtList|Rest]) ->
+%% [get_simple_table_info1(S,Cs,AtList,S#state.abscomppath)|get_simple_table_info(S,Cs,Rest)];
+ [get_simple_table_info1(S,Cs,AtList,[])|get_simple_table_info(S,Cs,Rest)];
+get_simple_table_info(_,_,[]) ->
+ [].
+get_simple_table_info1(S,Cs,[Cname|Cnames],Path) when list(Cs) ->
+ case lists:keysearch(Cname,#'ComponentType'.name,Cs) of
+ {value,C} ->
+ get_simple_table_info1(S,C,Cnames,[Cname|Path]);
+ _ ->
+ error({type,"Missing expected simple table constraint",S})
+ end;
+get_simple_table_info1(S,#'ComponentType'{typespec=TS},[],Path) ->
+    %% In this component there must be a simple table constraint,
+    %% otherwise the asn1 code is wrong.
+ #type{def=OCFT,constraint=Cnstr} = TS,
+ case Cnstr of
+        [{simpletable,_OSRef}] ->
+ #'ObjectClassFieldType'{classname=ClRef,
+ class=ObjectClass,
+ fieldname=FieldName} = OCFT,
+% #'ObjectClassFieldType'{ObjectClass,FieldType} = ObjectClassFieldType,
+ ObjectClassFieldName =
+ case FieldName of
+ {LastFieldName,[]} -> LastFieldName;
+ {_FirstFieldName,FieldNames} ->
+ lists:last(FieldNames)
+ end,
+ %%ObjectClassFieldName is the last element in the dotted
+ %%list of the ObjectClassFieldType. The last element may
+ %%be of another class, that is referenced from the class
+ %%of the ObjectClassFieldType
+ ClassDef =
+ case ObjectClass of
+ [] ->
+ {_,CDef}=get_referenced_type(S,ClRef),
+ CDef;
+ _ -> #classdef{typespec=ObjectClass}
+ end,
+ UniqueName =
+ case (catch get_unique_fieldname(ClassDef)) of
+ {error,'__undefined_'} -> no_unique;
+ {asn1,Msg,_} ->
+ error({type,Msg,S});
+ Other -> Other
+ end,
+ {lists:reverse(Path),ObjectClassFieldName,UniqueName};
+ _ ->
+ error({type,{asn1,"missing expected simple table constraint",
+ Cnstr},S})
+ end;
+get_simple_table_info1(S,#'ComponentType'{typespec=TS},Cnames,Path) ->
+ Components = get_atlist_components(TS#type.def),
+ get_simple_table_info1(S,Components,Cnames,Path).
+
+%% any_component_relation searches for all component relation
+%% constraints that refer to the actual level and returns a list of
+%% the "name path" in the at-list to the component relation constraint
+%% that must refer to a simple table constraint. The list is empty if
+%% no component relation constraints were found.
+%%
+%% NamePath has the names of all components that are followed from the
+%% beginning of the search. CNames holds the names of all components
+%% of the start level, this info is used if an outermost at-notation
+%% is found to check the validity of the at-list.
+any_component_relation(S,[C|Cs],CNames,NamePath,Acc) ->
+ CName = C#'ComponentType'.name,
+ Type = C#'ComponentType'.typespec,
+ CRelPath =
+ case Type#type.constraint of
+ [{componentrelation,_,AtNotation}] ->
+ %% Found component relation constraint, now check
+ %% whether this constraint is relevant for the level
+ %% where the search started
+ AtNot = extract_at_notation(AtNotation),
+ %% evaluate_atpath returns the relative path to the
+ %% simple table constraint from where the component
+ %% relation is found.
+ evaluate_atpath(S#state.abscomppath,NamePath,CNames,AtNot);
+ _ ->
+ []
+ end,
+ InnerAcc =
+ case {Type#type.inlined,
+ asn1ct_gen:type(asn1ct_gen:get_inner(Type#type.def))} of
+ {no,{constructed,bif}} ->
+ InnerCs =
+ case get_components(Type#type.def) of
+ {IC1,_IC2} -> IC1 ++ IC1;
+ IC -> IC
+ end,
+ %% here we are interested in components of an
+ %% SEQUENCE/SET OF as well as SEQUENCE, SET and CHOICE
+ any_component_relation(S,InnerCs,CNames,[CName|NamePath],[]);
+ _ ->
+ []
+ end,
+ any_component_relation(S,Cs,CNames,NamePath,InnerAcc++CRelPath++Acc);
+any_component_relation(_,[],_,_,Acc) ->
+ Acc.
+
+%% evaluate_atpath/4 finds out whether the at notation refers to the
+%% search level. The list of referenced names in the AtNot list shall
+%% begin with a name that exists on the level it refers to. If the
+%% found AtPath is referring to the same sub-branch as the simple table
+%% has, then there shall not be any leading attribute info on this
+%% level.
+evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) ->
+ %% any innermost constraint found deeper in the structure is
+ %% ignored.
+ case lists:member(Ref,Cnames) of
+ true -> [AtPath];
+ false -> []
+ end;
+%% In this case we must check that the AtPath does not follow any step
+%% of the NamePath; if it does, the constraint will be handled at an
+%% inner level.
+evaluate_atpath(TopPath,NamePath,Cnames,{outermost,AtPath=[_Ref|_Refs]}) ->
+ AtPathBelowTop =
+ case TopPath of
+ [] -> AtPath;
+ _ ->
+ case lists:prefix(TopPath,AtPath) of
+ true ->
+ lists:subtract(AtPath,TopPath);
+ _ -> []
+ end
+ end,
+ case {NamePath,AtPathBelowTop} of
+ {[H|_T1],[H|_T2]} -> []; % this must be handled in lower level
+ {_,[]} -> [];% this must be handled in an above level
+ {_,[H|_T]} ->
+ case lists:member(H,Cnames) of
+ true -> [AtPathBelowTop];
+ _ -> error({type,{asn1,"failed to analyze at-path",AtPath}})
+ end
+ end;
+evaluate_atpath(_,_,_,_) ->
+ [].
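+
+%% Illustration (the component names are made up): an innermost at-list
+%% is only relevant on the level where the search started, i.e. when its
+%% first name is one of the component names of that level:
+%%   evaluate_atpath([], [], ['type-id',value], {innermost,['type-id']})
+%%   -> [['type-id']]
+%%   evaluate_atpath([], [], ['type-id',value], {innermost,[other]})
+%%   -> []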
+
+%% Type may be any of SEQUENCE, SET, CHOICE, SEQUENCE OF, SET OF but
+%% only the three first have valid components.
+get_atlist_components(Def) ->
+ get_components(atlist,Def).
+
+get_components(Def) ->
+ get_components(any,Def).
+
+get_components(_,#'SEQUENCE'{components=Cs}) ->
+ Cs;
+get_components(_,#'SET'{components=Cs}) ->
+ Cs;
+get_components(_,{'CHOICE',Cs}) ->
+ Cs;
+get_components(any,{'SEQUENCE OF',#type{def=Def}}) ->
+ get_components(any,Def);
+get_components(any,{'SET OF',#type{def=Def}}) ->
+ get_components(any,Def);
+get_components(_,_) ->
+ [].
+
+
+extract_at_notation([{Level,[#'Externalvaluereference'{value=Name}|Rest]}]) ->
+ {Level,[Name|extract_at_notation1(Rest)]};
+extract_at_notation(At) ->
+ exit({error,{asn1,{at_notation,At}}}).
+extract_at_notation1([#'Externalvaluereference'{value=Name}|Rest]) ->
+ [Name|extract_at_notation1(Rest)];
+extract_at_notation1([]) ->
+ [].
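+
+%% Illustration (value reference made up): the at-notation is reduced to
+%% its level tag and the plain list of referenced names:
+%%   extract_at_notation([{innermost,[#'Externalvaluereference'{value=type}]}])
+%%   -> {innermost,[type]}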
+
+%% componentrelation1/1 identifies all componentrelation constraints
+%% that exist in C or in the substructure of C. Info about the found
+%% constraints is returned in a list: ObjectSet, the reference
+%% to the object set, AttrPath, the name atoms extracted from the
+%% at-list in the component relation constraint, ClassDef, the
+%% objectclass record of the class of the ObjectClassFieldType, Path,
+%% that is the component name "path" from the searched level to this
+%% constraint.
+%%
+%% The function is called with one component of the type in turn and
+%% with the component name in Path at the first call. When called from
+%% within, the name of the inner component is added to Path.
+componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
+ Path) ->
+ Ret =
+ case Constraint of
+ [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
+ [{_,AL=[#'Externalvaluereference'{}|_R1]}|_R2] = AtList,
+            %% Note: if Path is longer than one, i.e. it is within
+ %% an inner type of the actual level, then the only
+ %% relevant at-list is of "outermost" type.
+%% #'ObjectClassFieldType'{class=ClassDef} = Def,
+ ClassDef = get_ObjectClassFieldType_classdef(S,Def),
+ AtPath =
+ lists:map(fun(#'Externalvaluereference'{value=V})->V end,
+ AL),
+ {[{ObjectSet,AtPath,ClassDef,Path}],Def};
+ _Other ->
+ %% check the inner type of component
+ innertype_comprel(S,Def,Path)
+ end,
+ case Ret of
+ nofunobj ->
+ nofunobj; %% ignored by caller
+ {CRelI=[{ObjSet,_,_,_}],NewDef} -> %%
+ TCItmp = lists:subtract(TCI,[{objfun,ObjSet}]),
+ {CRelI,C#type{tablecinf=[{objfun,ObjSet}|TCItmp],def=NewDef}};
+ {CompRelInf,NewDef} -> %% more than one tuple in CompRelInf
+ TCItmp = lists:subtract(TCI,[{objfun,anyset}]),
+ {CompRelInf,C#type{tablecinf=[{objfun,anyset}|TCItmp],def=NewDef}}
+ end.
+
+innertype_comprel(S,{'SEQUENCE OF',Type},Path) ->
+ case innertype_comprel1(S,Type,Path) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewType} ->
+ {CompRelInf,{'SEQUENCE OF',NewType}}
+ end;
+innertype_comprel(S,{'SET OF',Type},Path) ->
+ case innertype_comprel1(S,Type,Path) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewType} ->
+ {CompRelInf,{'SET OF',NewType}}
+ end;
+innertype_comprel(S,{'CHOICE',CTypeList},Path) ->
+ case componentlist_comprel(S,CTypeList,[],Path,[]) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewCs} ->
+ {CompRelInf,{'CHOICE',NewCs}}
+ end;
+innertype_comprel(S,Seq = #'SEQUENCE'{components=Cs},Path) ->
+ case componentlist_comprel(S,Cs,[],Path,[]) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewCs} ->
+ {CompRelInf,Seq#'SEQUENCE'{components=NewCs}}
+ end;
+innertype_comprel(S,Set = #'SET'{components=Cs},Path) ->
+ case componentlist_comprel(S,Cs,[],Path,[]) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewCs} ->
+ {CompRelInf,Set#'SET'{components=NewCs}}
+ end;
+innertype_comprel(_,_,_) ->
+ nofunobj.
+
+componentlist_comprel(S,[C = #'ComponentType'{name=Name,typespec=Type}|Cs],
+ Acc,Path,NewCL) ->
+ case catch componentrelation1(S,Type,Path++[Name]) of
+ {'EXIT',_} ->
+ componentlist_comprel(S,Cs,Acc,Path,[C|NewCL]);
+ nofunobj ->
+ componentlist_comprel(S,Cs,Acc,Path,[C|NewCL]);
+ {CRelInf,NewType} ->
+ componentlist_comprel(S,Cs,CRelInf++Acc,Path,
+ [C#'ComponentType'{typespec=NewType}|NewCL])
+ end;
+componentlist_comprel(_,[],Acc,_,NewCL) ->
+ case Acc of
+ [] ->
+ nofunobj;
+ _ ->
+ {Acc,lists:reverse(NewCL)}
+ end.
+
+innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
+ Ret =
+ case Cons of
+ [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
+ %% This AtList must have an "outermost" at sign to be
+            %% relevant here.
+ [{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
+ = AtList,
+%% #'ObjectClassFieldType'{class=ClassDef} = Def,
+ ClassDef = get_ObjectClassFieldType_classdef(S,Def),
+ AtPath =
+ lists:map(fun(#'Externalvaluereference'{value=V})->V end,
+ AL),
+ [{ObjectSet,AtPath,ClassDef,Path}];
+ _ ->
+ innertype_comprel(S,Def,Path)
+ end,
+ case Ret of
+ nofunobj -> nofunobj;
+ L = [{ObjSet,_,_,_}] ->
+ TCItmp = lists:subtract(TCI,[{objfun,ObjSet}]),
+ {L,T#type{tablecinf=[{objfun,ObjSet}|TCItmp]}};
+ {CRelInf,NewDef} ->
+ TCItmp = lists:subtract(TCI,[{objfun,anyset}]),
+ {CRelInf,T#type{def=NewDef,tablecinf=[{objfun,anyset}|TCItmp]}}
+ end.
+
+
+%% leading_attr_index counts the index and picks the name of the
+%% component that is at the actual level in the at-list of the
+%% component relation constraint (AttrP). AbsP is the path of
+%% component names from the top type level to the actual level. AttrP
+%% is a list with the atoms from the at-list.
+leading_attr_index(S,Cs,[H={_,AttrP,_,_}|T],AbsP,Acc) ->
+ AttrInfo =
+ case lists:prefix(AbsP,AttrP) of
+            %% Why this? It is necessary when, as in the TConstrChoice
+            %% case, there is an inner structure with an outermost
+            %% at-list and the "leading attribute" code generation may
+            %% be at a level some steps below the outermost level.
+ true ->
+ RelativAttrP = lists:subtract(AttrP,AbsP),
+ %% The header is used to calculate the index of the
+ %% component and to give the fun, received from the
+            %% object set lookup, a unique name. The tail is
+ %% used to match the proper value input to the fun.
+ {hd(RelativAttrP),tl(RelativAttrP)};
+ false ->
+ {hd(AttrP),tl(AttrP)}
+ end,
+ case leading_attr_index1(S,Cs,H,AttrInfo,1) of
+ 0 ->
+ leading_attr_index(S,Cs,T,AbsP,Acc);
+ Res ->
+ leading_attr_index(S,Cs,T,AbsP,[Res|Acc])
+ end;
+leading_attr_index(_,_Cs,[],_,Acc) ->
+ lists:reverse(Acc).
+
+leading_attr_index1(_,[],_,_,_) ->
+ 0;
+leading_attr_index1(S,[C|Cs],Arg={ObjectSet,_,CDef,P},
+ AttrInfo={Attr,SubAttr},N) ->
+ case C#'ComponentType'.name of
+ Attr ->
+ ValueMatch = value_match(S,C,Attr,SubAttr),
+ {ObjectSet,Attr,N,CDef,P,ValueMatch};
+ _ ->
+ leading_attr_index1(S,Cs,Arg,AttrInfo,N+1)
+ end.
+
+%% value_match gathers information for a proper value match in the
+%% generated encode function. For a SEQUENCE or a SET the index of the
+%% component is counted. For a CHOICE the index is 2.
+value_match(S,C,Name,SubAttr) ->
+ value_match(S,C,Name,SubAttr,[]). % C has name Name
+value_match(_S,#'ComponentType'{},_Name,[],Acc) ->
+ Acc;% do not reverse, indexes in reverse order
+value_match(S,#'ComponentType'{typespec=Type},Name,[At|Ats],Acc) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ Components =
+ case get_atlist_components(Type#type.def) of
+ [] -> error({type,{asn1,"element in at list must be a "
+ "SEQUENCE, SET or CHOICE.",Name},S});
+ Comps -> Comps
+ end,
+ {Index,ValueIndex} = component_value_index(S,InnerType,At,Components),
+ value_match(S,lists:nth(Index,Components),At,Ats,[ValueIndex|Acc]).
+
+component_value_index(S,'CHOICE',At,Components) ->
+ {component_index(S,At,Components),2};
+component_value_index(S,_,At,Components) ->
+ %% SEQUENCE or SET
+ Index = component_index(S,At,Components),
+ {Index,{Index+1,At}}.
+
+component_index(S,Name,Components) ->
+ component_index1(S,Name,Components,1).
+component_index1(_S,Name,[#'ComponentType'{name=Name}|_Cs],N) ->
+ N;
+component_index1(S,Name,[_C|Cs],N) ->
+ component_index1(S,Name,Cs,N+1);
+component_index1(S,Name,[],_) ->
+ error({type,{asn1,"component of at-list was not"
+ " found in substructure",Name},S}).
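+
+%% Illustration (component names made up): the index of a named
+%% component is its 1-based position in the component list:
+%%   component_index(S, b, [#'ComponentType'{name=a},
+%%                          #'ComponentType'{name=b}]) -> 2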
+
+get_unique_fieldname(ClassDef) ->
+%% {_,Fields,_} = ClassDef#classdef.typespec,
+ Fields = (ClassDef#classdef.typespec)#objectclass.fields,
+ get_unique_fieldname(Fields,[]).
+
+get_unique_fieldname([],[]) ->
+ throw({error,'__undefined_'});
+get_unique_fieldname([],[Name]) ->
+ Name;
+get_unique_fieldname([],Acc) ->
+ throw({asn1,'only one UNIQUE field is allowed in CLASS',Acc});
+get_unique_fieldname([{fixedtypevaluefield,Name,_,'UNIQUE',_}|Rest],Acc) ->
+ get_unique_fieldname(Rest,[Name|Acc]);
+get_unique_fieldname([_H|T],Acc) ->
+ get_unique_fieldname(T,Acc).
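+
+%% Illustration of get_unique_fieldname/2 with a field list shaped like
+%% the 'TYPE-IDENTIFIER' class in default_class_list/0 below (IdType is
+%% a placeholder for any type term): only the fixedtypevaluefield marked
+%% 'UNIQUE' contributes, and exactly one such field is allowed:
+%%   get_unique_fieldname([{fixedtypevaluefield,id,IdType,'UNIQUE','MANDATORY'},
+%%                         {typefield,'Type','MANDATORY'}], []) -> id
+%% With two 'UNIQUE' fields the call throws
+%% {asn1,'only one UNIQUE field is allowed in CLASS',_}.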
+
+get_tableconstraint_info(S,Type,{CheckedTs,EComps}) ->
+ {get_tableconstraint_info(S,Type,CheckedTs,[]),
+ get_tableconstraint_info(S,Type,EComps,[])};
+get_tableconstraint_info(S,Type,CheckedTs) ->
+ get_tableconstraint_info(S,Type,CheckedTs,[]).
+
+get_tableconstraint_info(_S,_Type,[],Acc) ->
+ lists:reverse(Acc);
+get_tableconstraint_info(S,Type,[C|Cs],Acc) ->
+ CheckedTs = C#'ComponentType'.typespec,
+ AccComp =
+ case CheckedTs#type.def of
+ %% ObjectClassFieldType
+ OCFT=#'ObjectClassFieldType'{class=#objectclass{},
+ type=_AType} ->
+% AType = get_ObjectClassFieldType(S,Fields,FieldRef),
+% RefedFieldName =
+% get_referencedclassfield(CheckedTs#type.def),%is probably obsolete
+ NewOCFT =
+ OCFT#'ObjectClassFieldType'{class=[]},
+ C#'ComponentType'{typespec=
+ CheckedTs#type{
+% def=AType,
+ def=NewOCFT
+ }};
+% constraint=[{tableconstraint_info,
+% FieldRef}]}};
+ {'SEQUENCE OF',SOType} when record(SOType,type),
+ (element(1,SOType#type.def)=='CHOICE') ->
+ CTypeList = element(2,SOType#type.def),
+ NewInnerCList =
+ get_tableconstraint_info(S,Type,CTypeList,[]),
+ C#'ComponentType'{typespec=
+ CheckedTs#type{
+ def={'SEQUENCE OF',
+ SOType#type{def={'CHOICE',
+ NewInnerCList}}}}};
+ {'SET OF',SOType} when record(SOType,type),
+ (element(1,SOType#type.def)=='CHOICE') ->
+ CTypeList = element(2,SOType#type.def),
+ NewInnerCList =
+ get_tableconstraint_info(S,Type,CTypeList,[]),
+ C#'ComponentType'{typespec=
+ CheckedTs#type{
+ def={'SET OF',
+ SOType#type{def={'CHOICE',
+ NewInnerCList}}}}};
+ _ ->
+ C
+ end,
+ get_tableconstraint_info(S,Type,Cs,[AccComp|Acc]).
+
+get_referenced_fieldname([{_,FirstFieldname}]) ->
+ {FirstFieldname,[]};
+get_referenced_fieldname([{_,FirstFieldname}|Rest]) ->
+ {FirstFieldname,lists:map(fun(X)->element(2,X) end,Rest)};
+get_referenced_fieldname(Def) ->
+ {no_type,Def}.
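+
+%% Illustration (field references made up): the first field name is
+%% split off and the remaining dotted names are collected in a list:
+%%   get_referenced_fieldname([{typefieldreference,a}])  -> {a,[]}
+%%   get_referenced_fieldname([{typefieldreference,a},
+%%                             {valuefieldreference,b}]) -> {a,[b]}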
+
+%% get_ObjectClassFieldType extracts the type from the chain of
+%% objects that leads to a final type.
+get_ObjectClassFieldType(S,ERef,PrimFieldNameList) when
+ record(ERef,'Externaltypereference') ->
+ {_,Type} = get_referenced_type(S,ERef),
+ ClassSpec = check_class(S,Type),
+ Fields = ClassSpec#objectclass.fields,
+ get_ObjectClassFieldType(S,Fields,PrimFieldNameList);
+get_ObjectClassFieldType(S,Fields,L=[_PrimFieldName1|_Rest]) ->
+ check_PrimitiveFieldNames(S,Fields,L),
+ get_OCFType(S,Fields,L).
+
+check_PrimitiveFieldNames(_S,_Fields,_) ->
+ ok.
+
+%% get_ObjectClassFieldType_classdef gets the def of the class of the
+%% ObjectClassFieldType, i.e. the objectclass record. If the type has
+%% been checked (it may be a field type of an internal SEQUENCE), the
+%% class field is []; then the classdef has to be fetched with the help
+%% of the class reference in the classname field.
+get_ObjectClassFieldType_classdef(S,#'ObjectClassFieldType'{classname=Name,
+ class=[]}) ->
+ {_,#classdef{typespec=TS}} = get_referenced_type(S,Name),
+ TS;
+get_ObjectClassFieldType_classdef(_,#'ObjectClassFieldType'{class=Cl}) ->
+ Cl.
+
+get_OCFType(S,Fields,[{_FieldType,PrimFieldName}|Rest]) ->
+ case lists:keysearch(PrimFieldName,2,Fields) of
+ {value,{fixedtypevaluefield,_,Type,_Unique,_OptSpec}} ->
+ {fixedtypevaluefield,PrimFieldName,Type};
+ {value,{objectfield,_,Type,_Unique,_OptSpec}} ->
+ {_,ClassDef} = get_referenced_type(S,Type#type.def),
+ CheckedCDef = check_class(S#state{type=ClassDef,
+ tname=ClassDef#classdef.name},
+ ClassDef#classdef.typespec),
+ get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
+ {value,{objectsetfield,_,Type,_OptSpec}} ->
+ {_,ClassDef} = get_referenced_type(S,Type#type.def),
+ CheckedCDef = check_class(S#state{type=ClassDef,
+ tname=ClassDef#classdef.name},
+ ClassDef#classdef.typespec),
+ get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
+
+ {value,Other} ->
+ {element(1,Other),PrimFieldName};
+ _ ->
+ error({type,"undefined FieldName in ObjectClassFieldType",S})
+ end.
+
+get_taglist(#state{erule=per},_) ->
+ [];
+get_taglist(#state{erule=per_bin},_) ->
+ [];
+get_taglist(S,Ext) when record(Ext,'Externaltypereference') ->
+ {_,T} = get_referenced_type(S,Ext),
+ get_taglist(S,T#typedef.typespec);
+get_taglist(S,Tref) when record(Tref,typereference) ->
+ {_,T} = get_referenced_type(S,Tref),
+ get_taglist(S,T#typedef.typespec);
+get_taglist(S,Type) when record(Type,type) ->
+ case Type#type.tag of
+ [] ->
+ get_taglist(S,Type#type.def);
+ [Tag|_] ->
+% case lists:member(S#state.erule,[ber,ber_bin]) of
+% true ->
+% lists:map(fun(Tx) -> asn1ct_gen:def_to_tag(Tx) end,Type#type.tag);
+% _ ->
+ [asn1ct_gen:def_to_tag(Tag)]
+% end
+ end;
+get_taglist(S,{'CHOICE',{Rc,Ec}}) ->
+ get_taglist(S,{'CHOICE',Rc ++ Ec});
+get_taglist(S,{'CHOICE',Components}) ->
+ get_taglist1(S,Components);
+%% ObjectClassFieldType OTP-4390
+get_taglist(_S,#'ObjectClassFieldType'{type={typefield,_}}) ->
+ [];
+get_taglist(S,#'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}) ->
+ get_taglist(S,Type);
+get_taglist(S,{ERef=#'Externaltypereference'{},FieldNameList})
+ when list(FieldNameList) ->
+ case get_ObjectClassFieldType(S,ERef,FieldNameList) of
+ Type when record(Type,type) ->
+ get_taglist(S,Type);
+ {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
+ {TypeFieldName,_} when atom(TypeFieldName) -> []%should check if allowed
+ end;
+get_taglist(S,{ObjCl,FieldNameList}) when record(ObjCl,objectclass),
+ list(FieldNameList) ->
+ case get_ObjectClassFieldType(S,ObjCl#objectclass.fields,FieldNameList) of
+ Type when record(Type,type) ->
+ get_taglist(S,Type);
+ {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
+ {TypeFieldName,_} when atom(TypeFieldName) -> []%should check if allowed
+ end;
+get_taglist(S,Def) ->
+ case lists:member(S#state.erule,[ber_bin_v2]) of
+ false ->
+ case Def of
+ 'ASN1_OPEN_TYPE' -> % open_type has no UNIVERSAL tag as such
+ [];
+ _ ->
+ [asn1ct_gen:def_to_tag(Def)]
+ end;
+ _ ->
+ []
+ end.
+
+get_taglist1(S,[#'ComponentType'{name=_Cname,tags=TagL}|Rest]) when list(TagL) ->
+    %% the tag list has already been computed here; just return TagL and continue with the next alternative
+ TagL ++ get_taglist1(S,Rest);
+get_taglist1(S,[#'ComponentType'{typespec=Ts,tags=undefined}|Rest]) ->
+ get_taglist(S,Ts) ++ get_taglist1(S,Rest);
+get_taglist1(S,[_H|Rest]) -> % skip EXTENSIONMARK
+ get_taglist1(S,Rest);
+get_taglist1(_S,[]) ->
+ [].
+
+dbget_ex(_S,Module,Key) ->
+ case asn1_db:dbget(Module,Key) of
+ undefined ->
+	    throw({error,{asn1,{undefined,{Module,Key}}}}); % this is caught on the toplevel type or value
+ T -> T
+ end.
+
+merge_tags(T1, T2) when list(T2) ->
+ merge_tags2(T1 ++ T2, []);
+merge_tags(T1, T2) ->
+ merge_tags2(T1 ++ [T2], []).
+
+merge_tags2([T1= #tag{type='IMPLICIT'}, T2 |Rest], Acc) ->
+ merge_tags2([T1#tag{type=T2#tag.type, form=T2#tag.form}|Rest],Acc);
+merge_tags2([T1= #tag{type={default,'IMPLICIT'}}, T2 |Rest], Acc) ->
+ merge_tags2([T1#tag{type=T2#tag.type, form=T2#tag.form}|Rest],Acc);
+merge_tags2([H|T],Acc) ->
+ merge_tags2(T, [H|Acc]);
+merge_tags2([], Acc) ->
+ lists:reverse(Acc).
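+
+%% Illustration (tag values made up): an IMPLICIT tag takes over the
+%% type and form of the tag that follows it, so the inner tag disappears:
+%%   merge_tags([#tag{class='CONTEXT',number=1,type='IMPLICIT',form=0}],
+%%              [#tag{class='UNIVERSAL',number=2,type='EXPLICIT',form=32}])
+%%   -> [#tag{class='CONTEXT',number=1,type='EXPLICIT',form=32}]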
+
+merge_constraints(C1, []) ->
+ C1;
+merge_constraints([], C2) ->
+ C2;
+merge_constraints(C1, C2) ->
+ {SList,VList,PAList,Rest} = splitlist(C1++C2,[],[],[],[]),
+ SizeC = merge_constraints(SList),
+ ValueC = merge_constraints(VList),
+ PermAlphaC = merge_constraints(PAList),
+ case Rest of
+ [] ->
+ SizeC ++ ValueC ++ PermAlphaC;
+ _ ->
+ throw({error,{asn1,{not_implemented,{merge_constraints,Rest}}}})
+ end.
+
+merge_constraints([]) -> [];
+merge_constraints([C1 = {_,{Low1,High1}},{_,{Low2,High2}}|Rest]) when Low1 >= Low2,
+ High1 =< High2 ->
+ merge_constraints([C1|Rest]);
+merge_constraints([C1={'PermittedAlphabet',_},C2|Rest]) ->
+ [C1|merge_constraints([C2|Rest])];
+merge_constraints([C1 = {_,{_Low1,_High1}},C2 = {_,{_Low2,_High2}}|_Rest]) ->
+ throw({error,asn1,{conflicting_constraints,{C1,C2}}});
+merge_constraints([C]) ->
+ [C].
+
+splitlist([C={'SizeConstraint',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,[C|Sacc],Vacc,PAacc,Restacc);
+splitlist([C={'ValueRange',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,Sacc,[C|Vacc],PAacc,Restacc);
+splitlist([C={'PermittedAlphabet',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,Sacc,Vacc,[C|PAacc],Restacc);
+splitlist([C|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,Sacc,Vacc,PAacc,[C|Restacc]);
+splitlist([],Sacc,Vacc,PAacc,Restacc) ->
+ {lists:reverse(Sacc),
+ lists:reverse(Vacc),
+ lists:reverse(PAacc),
+ lists:reverse(Restacc)}.
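+
+%% Illustration (constraint values made up): the constraints are split
+%% per kind and a size constraint that lies within a wider one replaces
+%% it:
+%%   merge_constraints([{'SizeConstraint',{2,5}}],
+%%                     [{'SizeConstraint',{1,10}}])
+%%   -> [{'SizeConstraint',{2,5}}]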
+
+
+
+storeindb(M) when record(M,module) ->
+ TVlist = M#module.typeorval,
+ NewM = M#module{typeorval=findtypes_and_values(TVlist)},
+ asn1_db:dbnew(NewM#module.name),
+ asn1_db:dbput(NewM#module.name,'MODULE', NewM),
+ Res = storeindb(NewM#module.name,TVlist,[]),
+ include_default_class(NewM#module.name),
+ include_default_type(NewM#module.name),
+ Res.
+
+storeindb(Module,[H|T],ErrAcc) when record(H,typedef) ->
+ storeindb(Module,H#typedef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,valuedef) ->
+ storeindb(Module,H#valuedef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,ptypedef) ->
+ storeindb(Module,H#ptypedef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,classdef) ->
+ storeindb(Module,H#classdef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,pvaluesetdef) ->
+ storeindb(Module,H#pvaluesetdef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,pobjectdef) ->
+ storeindb(Module,H#pobjectdef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,pvaluedef) ->
+ storeindb(Module,H#pvaluedef.name,H,T,ErrAcc);
+storeindb(_,[],[]) -> ok;
+storeindb(_,[],ErrAcc) ->
+ {error,ErrAcc}.
+
+storeindb(Module,Name,H,T,ErrAcc) ->
+ case asn1_db:dbget(Module,Name) of
+ undefined ->
+ asn1_db:dbput(Module,Name,H),
+ storeindb(Module,T,ErrAcc);
+ _ ->
+ case H of
+ _Type when record(H,typedef) ->
+ error({type,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,valuedef) ->
+ error({value,"already defined",
+ #state{mname=Module,value=H,vname=Name}});
+ _Type when record(H,ptypedef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,pobjectdef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,pvaluesetdef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,pvaluedef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,classdef) ->
+ error({class,"already defined",
+ #state{mname=Module,value=H,vname=Name}})
+ end,
+ storeindb(Module,T,[H|ErrAcc])
+ end.
+
+findtypes_and_values(TVList) ->
+ findtypes_and_values(TVList,[],[],[],[],[],[]).%% Types,Values,
+%% Parameterizedtypes,Classes,Objects and ObjectSets
+
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,typedef),record(H#typedef.typespec,'Object') ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,[H#typedef.name|Oacc],OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,typedef),record(H#typedef.typespec,'ObjectSet') ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,Oacc,[H#typedef.name|OSacc]);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,typedef) ->
+ findtypes_and_values(T,[H#typedef.name|Tacc],Vacc,Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,valuedef) ->
+ findtypes_and_values(T,Tacc,[H#valuedef.name|Vacc],Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,ptypedef) ->
+ findtypes_and_values(T,Tacc,Vacc,[H#ptypedef.name|Pacc],Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,classdef) ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,[H#classdef.name|Cacc],Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pvaluedef) ->
+ findtypes_and_values(T,Tacc,[H#pvaluedef.name|Vacc],Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pvaluesetdef) ->
+ findtypes_and_values(T,Tacc,[H#pvaluesetdef.name|Vacc],Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pobjectdef) ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,[H#pobjectdef.name|Oacc],OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pobjectsetdef) ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,Oacc,[H#pobjectsetdef.name|OSacc]);
+findtypes_and_values([],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc) ->
+ {lists:reverse(Tacc),lists:reverse(Vacc),lists:reverse(Pacc),
+ lists:reverse(Cacc),lists:reverse(Oacc),lists:reverse(OSacc)}.
+
+
+
+error({export,Msg,#state{mname=Mname,type=Ref,tname=Typename}}) ->
+ Pos = Ref#'Externaltypereference'.pos,
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Typename,Msg]),
+ {error,{export,Pos,Mname,Typename,Msg}};
+error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
+ when record(Type,typedef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",
+ [Type#typedef.pos,Mname,Typename,Msg]),
+ {error,{type,Type#typedef.pos,Mname,Typename,Msg}};
+error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
+ when record(Type,ptypedef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",
+ [Type#ptypedef.pos,Mname,Typename,Msg]),
+ {error,{type,Type#ptypedef.pos,Mname,Typename,Msg}};
+error({type,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
+ when record(Value,valuedef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
+ {error,{type,Value#valuedef.pos,Mname,Valuename,Msg}};
+error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
+ when record(Type,pobjectdef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",
+ [Type#pobjectdef.pos,Mname,Typename,Msg]),
+ {error,{type,Type#pobjectdef.pos,Mname,Typename,Msg}};
+error({value,Msg,#state{mname=Mname,value=Value,vname=Valuename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
+ {error,{value,Value#valuedef.pos,Mname,Valuename,Msg}};
+error({Other,Msg,#state{mname=Mname,value=#valuedef{pos=Pos},vname=Valuename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Valuename,Msg]),
+ {error,{Other,Pos,Mname,Valuename,Msg}};
+error({Other,Msg,#state{mname=Mname,type=#typedef{pos=Pos},tname=Typename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Typename,Msg]),
+ {error,{Other,Pos,Mname,Typename,Msg}};
+error({Other,Msg,#state{mname=Mname,type=#classdef{pos=Pos},tname=Typename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Typename,Msg]),
+ {error,{Other,Pos,Mname,Typename,Msg}}.
+
+include_default_type(Module) ->
+ NameAbsList = default_type_list(),
+ include_default_type1(Module,NameAbsList).
+
+include_default_type1(_,[]) ->
+ ok;
+include_default_type1(Module,[{Name,TS}|Rest]) ->
+ case asn1_db:dbget(Module,Name) of
+ undefined ->
+ T = #typedef{name=Name,
+ typespec=TS},
+ asn1_db:dbput(Module,Name,T);
+ _ -> ok
+ end,
+ include_default_type1(Module,Rest).
+
+default_type_list() ->
+ %% The EXTERNAL type is represented, according to ASN.1 1997,
+ %% as a SEQUENCE with components: identification, data-value-descriptor
+ %% and data-value.
+ Syntax =
+ #'ComponentType'{name=syntax,
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ Presentation_Cid =
+ #'ComponentType'{name='presentation-context-id',
+ typespec=#type{def='INTEGER'},
+ prop=mandatory},
+ Transfer_syntax =
+ #'ComponentType'{name='transfer-syntax',
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ Negotiation_items =
+ #type{def=
+ #'SEQUENCE'{components=
+ [Presentation_Cid,
+ Transfer_syntax#'ComponentType'{prop=mandatory}]}},
+ Context_negot =
+ #'ComponentType'{name='context-negotiation',
+ typespec=Negotiation_items,
+ prop=mandatory},
+
+ Data_value_descriptor =
+ #'ComponentType'{name='data-value-descriptor',
+ typespec=#type{def='ObjectDescriptor'},
+ prop='OPTIONAL'},
+ Data_value =
+ #'ComponentType'{name='data-value',
+ typespec=#type{def='OCTET STRING'},
+ prop=mandatory},
+
+ %% The EXTERNAL type is represented, according to ASN.1 1990,
+ %% as a SEQUENCE with components: direct-reference, indirect-reference,
+ %% data-value-descriptor and encoding.
+
+ Direct_reference =
+ #'ComponentType'{name='direct-reference',
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop='OPTIONAL'},
+
+ Indirect_reference =
+ #'ComponentType'{name='indirect-reference',
+ typespec=#type{def='INTEGER'},
+ prop='OPTIONAL'},
+
+ Single_ASN1_type =
+ #'ComponentType'{name='single-ASN1-type',
+ typespec=#type{tag=[{tag,'CONTEXT',0,
+ 'EXPLICIT',32}],
+ def='ANY'},
+ prop=mandatory},
+
+ Octet_aligned =
+ #'ComponentType'{name='octet-aligned',
+ typespec=#type{tag=[{tag,'CONTEXT',1,
+ 'IMPLICIT',32}],
+ def='OCTET STRING'},
+ prop=mandatory},
+
+ Arbitrary =
+ #'ComponentType'{name=arbitrary,
+ typespec=#type{tag=[{tag,'CONTEXT',2,
+ 'IMPLICIT',32}],
+ def={'BIT STRING',[]}},
+ prop=mandatory},
+
+ Encoding =
+ #'ComponentType'{name=encoding,
+ typespec=#type{def={'CHOICE',
+ [Single_ASN1_type,Octet_aligned,
+ Arbitrary]}},
+ prop=mandatory},
+
+ EXTERNAL_components1990 =
+ [Direct_reference,Indirect_reference,Data_value_descriptor,Encoding],
+
+ %% The EMBEDDED PDV type is represented by a SEQUENCE type
+ %% with components: identification and data-value
+ Abstract =
+ #'ComponentType'{name=abstract,
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ Transfer =
+ #'ComponentType'{name=transfer,
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ AbstractTrSeq =
+ #'SEQUENCE'{components=[Abstract,Transfer]},
+ Syntaxes =
+ #'ComponentType'{name=syntaxes,
+ typespec=#type{def=AbstractTrSeq},
+ prop=mandatory},
+ Fixed = #'ComponentType'{name=fixed,
+ typespec=#type{def='NULL'},
+ prop=mandatory},
+ Negotiations =
+ [Syntaxes,Syntax,Presentation_Cid,Context_negot,
+ Transfer_syntax,Fixed],
+ Identification2 =
+ #'ComponentType'{name=identification,
+ typespec=#type{def={'CHOICE',Negotiations}},
+ prop=mandatory},
+ EmbeddedPdv_components =
+ [Identification2,Data_value],
+
+ %% The CHARACTER STRING type is represented by a SEQUENCE type
+ %% with components: identification and string-value
+ String_value =
+ #'ComponentType'{name='string-value',
+ typespec=#type{def='OCTET STRING'},
+ prop=mandatory},
+ CharacterString_components =
+ [Identification2,String_value],
+
+ [{'EXTERNAL',
+ #type{tag=[#tag{class='UNIVERSAL',
+ number=8,
+ type='IMPLICIT',
+ form=32}],
+ def=#'SEQUENCE'{components=
+ EXTERNAL_components1990}}},
+ {'EMBEDDED PDV',
+ #type{tag=[#tag{class='UNIVERSAL',
+ number=11,
+ type='IMPLICIT',
+ form=32}],
+ def=#'SEQUENCE'{components=EmbeddedPdv_components}}},
+ {'CHARACTER STRING',
+ #type{tag=[#tag{class='UNIVERSAL',
+ number=29,
+ type='IMPLICIT',
+ form=32}],
+ def=#'SEQUENCE'{components=CharacterString_components}}}
+ ].
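The list built above maps each built-in type name ('EXTERNAL', 'EMBEDDED PDV', 'CHARACTER STRING') to its #type{} definition. As a minimal sketch of how such a [{Name,Type}] list could be consumed (hypothetical helper, not part of this patch; it only assumes the tuple shape shown above):

    %% Hypothetical helper (not in the patch): look up one of the
    %% built-in definitions returned by the function above.
    default_type(Name, DefaultTypes) ->
        case lists:keyfind(Name, 1, DefaultTypes) of
            {Name, Type} -> {ok, Type};
            false        -> undefined
        end.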
+
+
+include_default_class(Module) ->
+ NameAbsList = default_class_list(),
+ include_default_class1(Module,NameAbsList).
+
+include_default_class1(_,[]) ->
+ ok;
+include_default_class1(Module,[{Name,TS}|_Rest]) ->
+ case asn1_db:dbget(Module,Name) of
+ undefined ->
+ C = #classdef{checked=true,name=Name,
+ typespec=TS},
+ asn1_db:dbput(Module,Name,C);
+ _ -> ok
+ end.
+
+default_class_list() ->
+ [{'TYPE-IDENTIFIER',
+ {objectclass,
+ [{fixedtypevaluefield,
+ id,
+ {type,[],'OBJECT IDENTIFIER',[]},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'}],
+ {'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id}]}}},
+ {'ABSTRACT-SYNTAX',
+ {objectclass,
+ [{fixedtypevaluefield,
+ id,
+ {type,[],'OBJECT IDENTIFIER',[]},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'},
+ {fixedtypevaluefield,
+ property,
+ {type,
+ [],
+ {'BIT STRING',[]},
+ []},
+ undefined,
+ {'DEFAULT',
+ [0,1,0]}}],
+ {'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id},
+ ['HAS',
+ 'PROPERTY',
+ {valuefieldreference,property}]]}}}].
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
new file mode 100644
index 0000000000..8a639de5bb
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
@@ -0,0 +1,1468 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_constructed_ber.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_constructed_ber).
+
+-export([gen_encode_sequence/3]).
+-export([gen_decode_sequence/3]).
+-export([gen_encode_set/3]).
+-export([gen_decode_set/3]).
+-export([gen_encode_sof/4]).
+-export([gen_decode_sof/4]).
+-export([gen_encode_choice/3]).
+-export([gen_decode_choice/3]).
+
+%%%% Application internal exports
+-export([match_tag/2]).
+
+-include("asn1_records.hrl").
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+% the encoding of the class of the tag, bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
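These macros hold the class bits (8-7) and the constructed bit (6) of a BER identifier octet. A small illustrative helper, not taken from this patch, showing how they combine with a low tag number (for example tag 5 gives 16#80 bor 2#00100000 bor 5 = 16#A5):

    %% Illustration only (assumes it sits next to the defines above):
    %% build the identifier octet for a constructed, context-specific tag.
    context_constructed_id(TagNo) when TagNo =< 30 ->
        ?CONTEXT bor ?CONSTRUCTED bor TagNo.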
+
+
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+
+ %% if EXTERNAL type the input value must be transformed to
+ %% ASN1 1990 format
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([" NewVal = asn1rt_check:transform_to_EXTERNAL1990(Val),",
+ nl]);
+ _ ->
+ ok
+ end,
+
+ {SeqOrSet,TableConsInfo,CompList} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {'SEQUENCE',TCI,CL};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {'SET',TCI,CL}
+ end,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ EncObj =
+ case TableConsInfo of
+ #simpletableattributes{usedclassfield=Used,
+ uniqueclassfield=Unique} when Used /= Unique ->
+ false;
+ %% ObjectSet, name of the object set in constraints
+ %%
+ %%{ObjectSet,AttrN,N,UniqueFieldName}
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex
+ } ->
+ OSDef =
+ case ObjectSet of
+ {Module,OSName} ->
+ asn1_db:dbget(Module,OSName);
+ OSName ->
+ asn1_db:dbget(get(currmod),OSName)
+ end,
+% io:format("currmod: ~p~nOSName: ~p~nAttrN: ~p~nN: ~p~nUniqueFieldName: ~p~n",
+% [get(currmod),OSName,AttrN,N,UniqueFieldName]),
+ case (OSDef#typedef.typespec)#'ObjectSet'.gen of
+ true ->
+% Val = lists:concat(["?RT_BER:cindex(",
+% N+1,",Val,"]),
+ ObjectEncode =
+ asn1ct_gen:un_hyphen_var(lists:concat(['Obj',
+ AttrN])),
+ emit({ObjectEncode," = ",nl}),
+ emit({" 'getenc_",ObjectSet,"'(",{asis,UniqueFieldName},
+ ", ",nl}),
+% emit({indent(35),"?RT_BER:cindex(",N+1,", Val,",
+% {asis,AttrN},")),",nl}),
+ emit([indent(10+length(atom_to_list(ObjectSet))),
+ "value_match(",{asis,ValueIndex},",",
+ "?RT_BER:cindex(",N+1,",Val,",
+ {asis,AttrN},"))),",nl]),
+ notice_value_match(),
+ {AttrN,ObjectEncode};
+ _ ->
+ false
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ %% when the simpletableattributes was at an
+ %% outer level and the objfun has been passed
+ %% through the function call
+ {"got objfun through args","ObjFun"};
+ _ ->
+ false
+ end
+ end,
+
+ gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj),
+
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type(SeqOrSet),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([nl," BytesSoFar = "]),
+ case SeqOrSet of
+ 'SET' when (D#type.def)#'SET'.sorted == dynamic ->
+ emit("?RT_BER:dynamicsort_SET_components(["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["]),",nl]);
+ _ ->
+ emit("["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["],",nl])
+ end,
+ emit(" LenSoFar = "),
+ case asn1ct_name:all(encLen) of
+ [] -> emit("0");
+ AllLengths ->
+ mkvplus(AllLengths)
+ end,
+ emit([",",nl]),
+% emit(["{TagBytes,Len} = ?RT_BER:encode_tags(TagIn ++ ",
+ emit([" ?RT_BER:encode_tags(TagIn ++ ",
+ {asis,MyTag},", BytesSoFar, LenSoFar).",nl]).
+
+
+gen_decode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+% asn1ct_name:new(term),
+ asn1ct_name:new(tag),
+ #'SEQUENCE'{tablecinf=TableConsInfo,components=CList} = D#type.def,
+ Ext = extensible(CList),
+ CompList = case CList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CList
+ end,
+
+ emit({" %%-------------------------------------------------",nl}),
+ emit({" %% decode tag and length ",nl}),
+ emit({" %%-------------------------------------------------",nl}),
+
+ asn1ct_name:new(rb),
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type('SEQUENCE'),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([" {{_,",asn1ct_gen_ber:unused_var("Len",D#type.def),"},",{next,bytes},",",{curr,rb},
+ "} = ?RT_BER:check_tags(TagIn ++ ",{asis,MyTag},", ",
+ {curr,bytes},", OptOrMand), ",nl]),
+ asn1ct_name:new(bytes),
+ asn1ct_name:new(len),
+
+ case CompList of
+ [] -> true;
+ _ ->
+ emit({"{",{next,bytes},
+ ",RemBytes} = ?RT_BER:split_list(",
+ {curr,bytes},
+ ",", {prev,len},"),",nl}),
+ asn1ct_name:new(bytes)
+ end,
+
+ {DecObjInf,UniqueFName,ValueIndex} =
+ case TableConsInfo of
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValIndex
+ } ->
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_R]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+ %%AttributeName = asn1ct_gen:un_hyphen_var(AttrN),
+		      true -> % when a component relation constraint establishes
+			   %% a relation from one component to another
+			   %% component's subtype component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName,
+ ValIndex}},
+ UniqueFieldName,ValIndex};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName,ValIndex}
+ end;
+ _ ->
+ {false,false,false}
+ end,
+ case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
+ no_terms -> % an empty sequence
+ emit([nl,nl]),
+ demit({"Result = "}), %dbg
+ %% return value as record
+ asn1ct_name:new(rb),
+ emit([" {{'",asn1ct_gen:list2rname(Typename),"'}, ",{curr,bytes},",",nl," "]),
+ asn1ct_gen_ber:add_removed_bytes(),
+ emit(["}.",nl]);
+ {LeadingAttrTerm,PostponedDecArgs} ->
+ emit([com,nl,nl]),
+ case {LeadingAttrTerm,PostponedDecArgs} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],PostponedDecArgs} ->
+ DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
+ ValueMatch = value_match(ValueIndex,Term),
+ emit([DecObj," =",nl," 'getdec_",ObjSet,"'(",
+% {asis,UniqueFName},", ",Term,"),",nl}),
+ {asis,UniqueFName},", ",ValueMatch,"),",nl]),
+ gen_dec_postponed_decs(DecObj,PostponedDecArgs)
+ end,
+ demit({"Result = "}), %dbg
+ %% return value as record
+ asn1ct_name:new(rb),
+ asn1ct_name:new(bytes),
+ ExtStatus = case Ext of
+ {ext,_,_} -> ext;
+ noext -> noext
+ end,
+ emit([" {",{next,bytes},",",{curr,rb},"} = ?RT_BER:restbytes2(RemBytes, ",
+ {curr,bytes},",",ExtStatus,"),",nl]),
+ asn1ct_name:new(rb),
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([" OldFormat={'",asn1ct_gen:list2rname(Typename),
+ "', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["},",nl]),
+ emit([" ASN11994Format =",nl,
+ " asn1rt_check:transform_to_EXTERNAL1994",
+ "(OldFormat),",nl]),
+ emit([" {ASN11994Format,",{next,bytes},", "]);
+ _ ->
+ emit([" {{'",asn1ct_gen:list2rname(Typename),"', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["}, ",{next,bytes},", "])
+ end,
+ asn1ct_gen_ber:add_removed_bytes(),
+ emit(["}.",nl])
+ end.
+
+gen_dec_postponed_decs(_,[]) ->
+ emit(nl);
+gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term,TmpTerm,_Tag,OptOrMand}|Rest]) ->
+% asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+
+ emit({"{",Term,", _, _} = ",nl}),
+ N = case OptOrMand of
+ mandatory -> 0;
+ 'OPTIONAL' ->
+ emit_opt_or_mand_check(asn1_NOVALUE,TmpTerm),
+ 6;
+ {'DEFAULT',Val} ->
+ emit_opt_or_mand_check(Val,TmpTerm),
+ 6
+ end,
+ emit({indent(N+3),"case (catch ",DecObj,"(",{asis,FirstPFN},
+% ", ",TmpTerm,", ", {asis,Tag},", ",{asis,PFNList},")) of",nl}),
+ ", ",TmpTerm,", [], ",{asis,PFNList},")) of",nl}),
+ emit({indent(N+6),"{'EXIT', ",{curr,reason},"} ->",nl}),
+ emit({indent(N+9),"exit({'Type not compatible with table constraint',",
+ {curr,reason},"});",nl}),
+ emit({indent(N+6),{curr,tmpterm}," ->",nl}),
+ emit({indent(N+9),{curr,tmpterm},nl}),
+
+ case OptOrMand of
+ mandatory -> emit([indent(N+3),"end,",nl]);
+ _ ->
+ emit([indent(N+3),"end",nl,
+ indent(3),"end,",nl])
+ end,
+% emit({indent(3),"end,",nl}),
+ gen_dec_postponed_decs(DecObj,Rest).
+
+
+emit_opt_or_mand_check(Value,TmpTerm) ->
+ emit([indent(3),"case ",TmpTerm," of",nl,
+ indent(6),{asis,Value}," -> {",{asis,Value},",[],[]};",nl,
+ indent(6),"_ ->",nl]).
+
+%%============================================================================
+%% Encode/decode SET
+%%
+%%============================================================================
+
+gen_encode_set(Erules,Typename,D) when record(D,type) ->
+ gen_encode_sequence(Erules,Typename,D).
+
+gen_decode_set(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(tag),
+ #'SET'{components=TCompList} = D#type.def,
+ Ext = extensible(TCompList),
+ CompList = case TCompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> TCompList
+ end,
+
+ emit([" %%-------------------------------------------------",nl]),
+ emit([" %% decode tag and length ",nl]),
+ emit([" %%-------------------------------------------------",nl]),
+
+ asn1ct_name:new(rb),
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type('SET'),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([" {{_,Len},",{next,bytes},",",{curr,rb},
+ "} = ?RT_BER:check_tags(TagIn ++ ",{asis,MyTag},", ",
+ {curr,bytes},", OptOrMand), ",nl]),
+ asn1ct_name:new(bytes),
+ asn1ct_name:new(len),
+ asn1ct_name:new(rb),
+
+ emit([" {SetTerm, SetBytes, ",{curr,rb},"} = ?RT_BER:decode_set(0, Len, ",
+ {curr,bytes},", OptOrMand, ",
+ "fun 'dec_",asn1ct_gen:list2name(Typename),"_fun'/2, []),",nl]),
+
+ asn1ct_name:new(rb),
+ emit([" 'dec_",asn1ct_gen:list2name(Typename),"_result'(lists:sort(SetTerm), SetBytes, "]),
+ asn1ct_gen_ber:add_removed_bytes(),
+ emit([").",nl,nl,nl]),
+
+ emit({"%%-------------------------------------------------",nl}),
+ emit({"%% Set loop fun for ",asn1ct_gen:list2name(Typename),nl}),
+ emit({"%%-------------------------------------------------",nl}),
+
+ asn1ct_name:clear(),
+ asn1ct_name:new(term),
+ emit(["'dec_",asn1ct_gen:list2name(Typename),"_fun'(",{curr,bytes},
+ ", OptOrMand) ->",nl]),
+
+ asn1ct_name:new(bytes),
+ gen_dec_set(Erules,Typename,CompList,1,Ext),
+
+ emit([" %% tag not found, if extensionmark we should skip bytes here",nl]),
+ emit([indent(6),"_ -> {[], Bytes,0}",nl]),
+ emit([indent(3),"end.",nl,nl,nl]),
+
+
+ emit({"%%-------------------------------------------------",nl}),
+ emit({"%% Result ",asn1ct_gen:list2name(Typename),nl}),
+ emit({"%%-------------------------------------------------",nl}),
+
+ asn1ct_name:clear(),
+ emit({"'dec_",asn1ct_gen:list2name(Typename),"_result'(",
+ asn1ct_gen_ber:unused_var("TermList",D#type.def),", Bytes, Rb) ->",nl}),
+
+ case gen_dec_set_result(Erules,Typename,CompList) of
+ no_terms ->
+ %% return value as record
+ asn1ct_name:new(rb),
+ emit({" {{'",asn1ct_gen:list2rname(Typename),"'}, Bytes, Rb}.",nl});
+ _ ->
+ emit({nl," case ",{curr,termList}," of",nl}),
+ emit({" [] -> {{'",asn1ct_gen:list2rname(Typename),"', "}),
+ mkvlist(asn1ct_name:all(term)),
+ emit({"}, Bytes, Rb};",nl}),
+ emit({" ExtraAtt -> exit({error,{asn1,{too_many_attributes, ExtraAtt}}})",nl}),
+ emit({" end.",nl}),
+ emit({nl,nl,nl})
+ end.
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE OF and SET OF
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sof(Erules,Typename,_InnerTypename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, Cont} = D#type.def,
+
+ Objfun = case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+
+ emit({" {EncBytes,EncLen} = 'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(Val",Objfun,",[],0),",nl}),
+
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type(SeqOrSetOf),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+% gen_encode_tags(Erules,MyTag,"EncLen","EncBytes"),
+ emit([" ?RT_BER:encode_tags(TagIn ++ ",
+ {asis,MyTag},", EncBytes, EncLen).",nl,nl]),
+
+ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont).
+% gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",0,
+% mandatory,"{EncBytes,EncLen} = "),
+
+
+gen_decode_sof(Erules,Typename,_InnerTypename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, TypeTag, Cont} =
+ case D#type.def of
+ {'SET OF',_Cont} -> {'SET OF','SET',_Cont};
+ {'SEQUENCE OF',_Cont} -> {'SEQUENCE OF','SEQUENCE',_Cont}
+ end,
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+
+ emit({" %%-------------------------------------------------",nl}),
+ emit({" %% decode tag and length ",nl}),
+ emit({" %%-------------------------------------------------",nl}),
+
+ asn1ct_name:new(rb),
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type(TypeTag),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([" {{_,Len},",{next,bytes},",",{curr,rb},
+ "} = ?RT_BER:check_tags(TagIn ++ ",{asis,MyTag},", ",
+ {curr,bytes},", OptOrMand), ",nl]),
+
+ emit([" ?RT_BER:decode_components(",{curr,rb}]),
+ InnerType = asn1ct_gen:get_inner(Cont#type.def),
+ ContName = case asn1ct_gen:type(InnerType) of
+ Atom when atom(Atom) -> Atom;
+ _ -> TypeNameSuffix
+ end,
+ emit([", Len, ",{next,bytes},", "]),
+% NewCont =
+% case Cont#type.def of
+% {'ENUMERATED',_,Components}->
+% Cont#type{def={'ENUMERATED',Components}};
+% _ -> Cont
+% end,
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ []
+ end,
+ gen_dec_line_sof(Erules,Typename,ContName,Cont,ObjFun),
+ emit([", []).",nl,nl,nl]).
+
+
+gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
+ when record(Cont,type)->
+
+ {Objfun,ObjFun_novar,EncObj} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _",{no_attr,"ObjFun"}};
+ _ ->
+ {"","",false}
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([]",ObjFun_novar,", AccBytes, AccLen) -> ",nl]),
+
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit([indent(3),
+ "{?RT_BER:dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]);
+ _ ->
+ emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]),
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+ gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3,
+ mandatory,"{EncBytes,EncLen} = ",EncObj),
+ emit([",",nl]),
+ emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(T",Objfun,","]),
+ emit(["[EncBytes|AccBytes], AccLen + EncLen).",nl,nl]).
+
+%%============================================================================
+%% Encode/decode CHOICE
+%%
+%%============================================================================
+
+gen_encode_choice(Erules,Typename,D) when record(D,type) ->
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_enc_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit({nl,nl}).
+
+gen_decode_choice(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(bytes),
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_dec_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit({".",nl}).
+
+
+%%============================================================================
+%% Encode SEQUENCE
+%%
+%%============================================================================
+
+gen_enc_sequence_call(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],Pos,Ext,EncObj) ->
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Element =
+ case TopType of
+ ['EXTERNAL'] ->
+ io_lib:format("?RT_BER:cindex(~w,NewVal,~w)",[Pos+1,Cname]);
+ _ ->
+ io_lib:format("?RT_BER:cindex(~w,Val,~w)",[Pos+1,Cname])
+ end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ print_attribute_comment(InnerType,Pos,Prop),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,3,Prop,EncObj),
+ case Rest of
+ [] ->
+ emit({com,nl});
+ _ ->
+ emit({com,nl}),
+ gen_enc_sequence_call(Erules,TopType,Rest,Pos+1,Ext,EncObj)
+ end;
+
+gen_enc_sequence_call(_Erules,_TopType,[],_Num,_,_) ->
+ true.
+
+%%============================================================================
+%% Decode SEQUENCE
+%%
+%%============================================================================
+
+gen_dec_sequence_call(Erules,TopType,CompList,Ext,DecObjInf) ->
+ gen_dec_sequence_call1(Erules,TopType, CompList, 1, Ext,DecObjInf,[],[]).
+
+
+gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop,tags=Tags}|Rest],Num,Ext,DecObjInf,LeadingAttrAcc,ArgsAcc) ->
+ {LA,PostponedDec} =
+ gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
+ Ext,DecObjInf),
+ case Rest of
+ [] ->
+ {LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
+ _ ->
+ emit({com,nl}),
+% asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+ gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
+ LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
+ end;
+
+gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
+ no_terms.
+%%gen_dec_sequence_call1(Erules,_TopType,[],Num,_) ->
+%% true.
+
+
+
+%%----------------------------
+%%SEQUENCE mandatory
+%%----------------------------
+
+gen_dec_component(Erules,TopType,Cname,CTags,Type,Pos,Prop,Ext,DecObjInf) ->
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} -> OCFTType;
+ _ -> asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ Prop1 = case {Prop,Ext} of
+ {mandatory,{ext,Epos,_}} when Pos >= Epos ->
+ 'OPTIONAL';
+ _ ->
+ Prop
+ end,
+ print_attribute_comment(InnerType,Pos,Prop1),
+ emit(" "),
+
+ case {InnerType,DecObjInf} of
+ {{typefield,_},NotFalse} when NotFalse /= false ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},",",{next,rb},"} = "});
+ {{objectfield,_,_},_} ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},",",{next,rb},"} = "});
+ _ ->
+ asn1ct_name:new(term),
+ emit({"{",{curr,term},",",{next,bytes},",",{next,rb},"} = "})
+ end,
+ asn1ct_name:new(rb),
+ PostponedDec =
+ gen_dec_line(Erules,TopType,Cname,CTags,Type,Prop1,DecObjInf),
+ asn1ct_name:new(form),
+ PostponedDec.
+
+
+%%-------------------------------------
+%% Decode SET
+%%-------------------------------------
+
+gen_dec_set(Erules,TopType,CompList,Pos,_Ext) ->
+ TagList = get_all_choice_tags(CompList),
+ emit({indent(3),
+ {curr,tagList}," = ",{asis,TagList},",",nl}),
+ emit({indent(3),
+ "case ?RT_BER:check_if_valid_tag(Bytes, ",
+ {curr,tagList},", OptOrMand) of",nl}),
+ asn1ct_name:new(tagList),
+ asn1ct_name:new(rbCho),
+ asn1ct_name:new(choTags),
+ gen_dec_set_cases(Erules,TopType,CompList,TagList,Pos),
+ asn1ct_name:new(tag),
+ asn1ct_name:new(bytes).
+
+
+
+gen_dec_set_cases(_,_,[],_,_) ->
+ ok;
+gen_dec_set_cases(Erules,TopType,[H|T],List,Pos) ->
+ case H of
+ {'EXTENSIONMARK', _, _} ->
+ gen_dec_set_cases(Erules,TopType,T,List,Pos);
+ _ ->
+ Name = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+
+ emit({indent(6),"'",Name,"' ->",nl}),
+ case Type#type.def of
+ {'CHOICE',_NewCompList} ->
+ gen_dec_set_cases_choice(Erules,TopType,H,Pos);
+ _ ->
+ gen_dec_set_cases_type(Erules,TopType,H,Pos)
+ end,
+ gen_dec_set_cases(Erules,TopType,T,List,Pos+1)
+ end.
+
+
+
+
+gen_dec_set_cases_choice(_Erules,TopType,H,Pos) ->
+ Cname = H#'ComponentType'.name,
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- (H#'ComponentType'.typespec)#type.tag],
+ asn1ct_name:new(rbCho),
+ emit({indent(9),"{Dec, Rest, ",{curr,rbCho},"} = "}),
+ emit({"'dec_",asn1ct_gen:list2name([Cname|TopType]),
+ "'(Bytes,OptOrMand,",{asis,Tag},"),",nl}),
+ emit([" {{",Pos,",Dec}, Rest, ",{curr,rbCho},"}"]),
+ emit([";",nl,nl]).
+
+
+gen_dec_set_cases_type(Erules,TopType,H,Pos) ->
+ Cname = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+    %% always use Prop = mandatory here, rather than Prop = H#'ComponentType'.prop
+
+ asn1ct_name:new(rbCho),
+ emit({indent(9),"{Dec, Rest, ",{curr,rbCho},"} = "}),
+ asn1ct_name:delete(bytes),
+ %% we have already seen the tag so now we must find the value
+    %% that is why we always use 'mandatory' here
+ gen_dec_line(Erules,TopType,Cname,[],Type,mandatory,decObjInf),
+ asn1ct_name:new(bytes),
+
+ emit([",",nl]),
+ emit(["{{",Pos,",Dec}, Rest, ",{curr,rbCho},"}"]),
+ emit([";",nl,nl]).
+
+
+%%---------------------------------
+%% Decode SET result
+%%---------------------------------
+
+gen_dec_set_result(Erules,TopType,{CompList,_ExtList}) ->
+ gen_dec_set_result1(Erules,TopType, CompList, 1);
+gen_dec_set_result(Erules,TopType,CompList) ->
+ gen_dec_set_result1(Erules,TopType, CompList, 1).
+
+gen_dec_set_result1(Erules,TopType,
+ [#'ComponentType'{name=Cname,
+ typespec=Type,
+ prop=Prop}|Rest],Num) ->
+ gen_dec_set_component(Erules,TopType,Cname,Type,Num,Prop),
+ case Rest of
+ [] ->
+ true;
+ _ ->
+ gen_dec_set_result1(Erules,TopType,Rest,Num+1)
+ end;
+
+gen_dec_set_result1(_Erules,_TopType,[],1) ->
+ no_terms;
+gen_dec_set_result1(_Erules,_TopType,[],_Num) ->
+ true.
+
+
+gen_dec_set_component(_Erules,_TopType,_Cname,Type,Pos,Prop) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ print_attribute_comment(InnerType,Pos,Prop),
+ emit({" {",{next,term},com,{next,termList},"} =",nl}),
+ emit({" case ",{curr,termList}," of",nl}),
+ emit({" [{",Pos,com,{curr,termTmp},"}|",
+ {curr,rest},"] -> "}),
+ emit({"{",{curr,termTmp},com,
+ {curr,rest},"};",nl}),
+ case Prop of
+ 'OPTIONAL' ->
+ emit([indent(10),"_ -> {asn1_NOVALUE, ",{curr,termList},"}",nl]);
+ {'DEFAULT', DefVal} ->
+ emit([indent(10),
+ "_ -> {",{asis,DefVal},", ",{curr,termList},"}",nl]);
+ mandatory ->
+ emit([indent(10),
+ "_ -> exit({error,{asn1,{mandatory_attribute_no, ",
+ Pos,", missing}}})",nl])
+ end,
+ emit([indent(6),"end,",nl]),
+ asn1ct_name:new(rest),
+ asn1ct_name:new(term),
+ asn1ct_name:new(termList),
+ asn1ct_name:new(termTmp).
+
+
+%%---------------------------------------------
+%% Encode CHOICE
+%%---------------------------------------------
+%% for BER we currently do care (a little) if the choice has an EXTENSIONMARKER
+
+
+gen_enc_choice(Erules,TopType,Tag,CompList,_Ext) ->
+ gen_enc_choice1(Erules,TopType,Tag,CompList,_Ext).
+
+gen_enc_choice1(Erules,TopType,Tag,CompList,_Ext) ->
+ asn1ct_name:clear(),
+ emit({" {EncBytes,EncLen} = case element(1,Val) of",nl}),
+ gen_enc_choice2(Erules,TopType,CompList),
+ emit([nl," end,",nl,nl]),
+ NewTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- Tag],
+% gen_encode_tags(Erules,NewTag,"EncLen","EncBytes").
+ emit(["?RT_BER:encode_tags(TagIn ++",{asis,NewTag},", EncBytes, EncLen).",nl]).
+
+
+
+gen_enc_choice2(Erules,TopType,[H1|T]) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ emit({" ",{asis,Cname}," ->",nl}),
+ {Encobj,Assign} =
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+ case {Type#type.def,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+ {#'ObjectClassFieldType'{},{componentrelation,_,_}} ->
+ asn1ct_name:new(tmpBytes),
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Emit = ["{",{curr,tmpBytes},", _} = "],
+ {{no_attr,"ObjFun"},Emit};
+ _ ->
+ {false,[]}
+ end,
+ gen_enc_line(Erules,TopType,Cname,Type,"element(2,Val)",9,
+ mandatory,Assign,Encobj),
+ case Encobj of
+ false -> ok;
+ _ ->
+ emit({",",nl,indent(9),"{",{curr,encBytes},", ",
+ {curr,encLen},"}"})
+ end,
+ emit({";",nl}),
+ case T of
+ [] ->
+ emit([indent(6), "Else -> ",nl,
+ indent(9),"exit({error,{asn1,{invalid_choice_type,Else}}})"]);
+ _ ->
+ true
+ end,
+ gen_enc_choice2(Erules,TopType,T);
+
+gen_enc_choice2(_,_,[]) ->
+ true.
+
+
+
+
+%%--------------------------------------------
+%% Decode CHOICE
+%%--------------------------------------------
+
+gen_dec_choice(Erules,TopType, ChTag, CompList, Ext) ->
+ asn1ct_name:delete(bytes),
+ Tags = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- ChTag],
+
+ emit([" {{_,Len},",{next,bytes},
+ ", RbExp} = ?RT_BER:check_tags(TagIn++",
+ {asis,Tags},", ",
+ {curr,bytes},", OptOrMand),",nl]),
+ asn1ct_name:new(bytes),
+ asn1ct_name:new(len),
+ gen_dec_choice_indef_funs(Erules),
+ case Erules of
+ ber_bin ->
+ emit([indent(3),"case ",{curr,bytes}," of",nl]);
+ ber ->
+ emit([indent(3),
+ "case (catch ?RT_BER:peek_tag(",{curr,bytes},")) of",nl])
+ end,
+ asn1ct_name:new(tagList),
+ asn1ct_name:new(choTags),
+ gen_dec_choice_cases(Erules,TopType,CompList),
+ case Ext of
+ noext ->
+ emit([indent(6), {curr,else}," -> ",nl]),
+ emit([indent(9),"case OptOrMand of",nl,
+ indent(12),"mandatory ->","exit({error,{asn1,",
+ "{invalid_choice_tag,",{curr,else},"}}});",nl,
+ indent(12),"_ ->","exit({error,{asn1,{no_optional_tag,",
+ {curr,else},"}}})",nl,
+ indent(9),"end",nl]);
+ _ ->
+ emit([indent(6),"_ -> ",nl]),
+ emit([indent(9),"{{asn1_ExtAlt,",{curr,bytes},"},",
+ empty_lb(Erules),", RbExp}",nl])
+ end,
+ emit([indent(3),"end"]),
+ asn1ct_name:new(tag),
+ asn1ct_name:new(else).
+
+gen_dec_choice_indef_funs(Erules) ->
+ emit({indent(3),"IndefEndBytes = fun(indefinite,",indefend_match(Erules,used_var),
+ ")-> R; (_,B)-> B end,",nl}),
+ emit({indent(3),"IndefEndRb = fun(indefinite,",indefend_match(Erules,unused_var),
+ ")-> 2; (_,_)-> 0 end,",nl}).
+
+
+gen_dec_choice_cases(_,_, []) ->
+ ok;
+gen_dec_choice_cases(Erules,TopType, [H|T]) ->
+ asn1ct_name:push(rbCho),
+ Name = H#'ComponentType'.name,
+ emit([nl,"%% '",Name,"'",nl]),
+ Fcases = fun([T1,T2|Tail],Fun) ->
+ emit([indent(6),match_tag(Erules,T1)," ->",nl]),
+ gen_dec_choice_cases_type(Erules,TopType, H),
+ Fun([T2|Tail],Fun);
+ ([T1],_) ->
+ emit([indent(6),match_tag(Erules,T1)," ->",nl]),
+ gen_dec_choice_cases_type(Erules,TopType, H)
+ end,
+ Fcases(H#'ComponentType'.tags,Fcases),
+ asn1ct_name:pop(rbCho),
+ gen_dec_choice_cases(Erules,TopType, T).
+
+
+
+gen_dec_choice_cases_type(Erules,TopType,H) ->
+ Cname = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+ Prop = H#'ComponentType'.prop,
+ emit({indent(9),"{Dec, Rest, ",{curr,rbCho},"} = "}),
+ gen_dec_line(Erules,TopType,Cname,[],Type,Prop,false),
+ emit([",",nl,indent(9),"{{",{asis,Cname},
+ ", Dec}, IndefEndBytes(Len,Rest), RbExp + ",
+ {curr,rbCho}," + IndefEndRb(Len,Rest)};",nl,nl]).
+
+encode_tag_val(Erules,{Class,TagNo}) when integer(TagNo) ->
+ Rtmod = rtmod(Erules),
+ Rtmod:encode_tag_val({asn1ct_gen_ber:decode_class(Class),
+ 0,TagNo});
+encode_tag_val(Erules,{Class,TypeName}) ->
+ Rtmod = rtmod(Erules),
+ Rtmod:encode_tag_val({asn1ct_gen_ber:decode_class(Class),
+ 0,asn1ct_gen_ber:decode_type(TypeName)}).
+
+
+match_tag(ber_bin,Arg) ->
+ match_tag_with_bitsyntax(Arg);
+match_tag(Erules,Arg) ->
+ io_lib:format("~p",[encode_tag_val(Erules,Arg)]).
+
+match_tag_with_bitsyntax({Class,TagNo}) when integer(TagNo) ->
+ match_tag_with_bitsyntax1({asn1ct_gen_ber:decode_class(Class),
+ 0,TagNo});
+match_tag_with_bitsyntax({Class,TypeName}) ->
+ match_tag_with_bitsyntax1({asn1ct_gen_ber:decode_class(Class),
+ 0,asn1ct_gen_ber:decode_type(TypeName)}).
+
+match_tag_with_bitsyntax1({Class, _Form, TagNo}) when (TagNo =< 30) ->
+ io_lib:format("<<~p:2,_:1,~p:5,_/binary>>",[Class bsr 6,TagNo]);
+
+match_tag_with_bitsyntax1({Class, _Form, TagNo}) ->
+ {Octets,Len} = mk_object_val(TagNo),
+ OctForm = case Len of
+ 1 -> "~p";
+ 2 -> "~p,~p";
+ 3 -> "~p,~p,~p";
+ 4 -> "~p,~p,~p,~p"
+ end,
+ io_lib:format("<<~p:2,_:1,31:5," ++ OctForm ++ ",_/binary>>",
+ [Class bsr 6] ++ Octets).
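As a sanity check of the low-tag-number branch above: a context-specific class (16#80 bsr 6 =:= 2) with tag number 5 produces the pattern "<<2:2,_:1,5:5,_/binary>>". A hypothetical test function, not part of the patch:

    match_tag_low_test() ->
        "<<2:2,_:1,5:5,_/binary>>" =
            lists:flatten(match_tag_with_bitsyntax1({16#80, 0, 5})),
        ok.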
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets; bit 8 is always set to one except
+%% in the last octet, where it is 0
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
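A worked example of the encoding described in the comment above (hypothetical test, not part of the patch): 201 splits into 1*128 + 73, so the first octet carries 1 with bit 8 set (129) and the last carries 73 with bit 8 clear.

    mk_object_val_test() ->
        {[5], 1}       = mk_object_val(5),
        {[129, 73], 2} = mk_object_val(201),
        ok.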
+
+
+get_all_choice_tags(ComponentTypeList) ->
+ get_all_choice_tags(ComponentTypeList,[]).
+
+get_all_choice_tags([],TagList) ->
+ TagList;
+get_all_choice_tags([H|T],TagList) ->
+ Tags = H#'ComponentType'.tags,
+ get_all_choice_tags(T, TagList ++ [{H#'ComponentType'.name, Tags}]).
+
+
+
+%%---------------------------------------
+%% Generate the encode/decode code
+%%---------------------------------------
+
+gen_enc_line(Erules,TopType,Cname,
+ Type=#type{constraint=[{componentrelation,_,_}],
+ def=#'ObjectClassFieldType'{type={typefield,_}}},
+ Element,Indent,OptOrMand=mandatory,EncObj)
+ when list(Element) ->
+ asn1ct_name:new(tmpBytes),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,tmpBytes},",_} = "],EncObj);
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,EncObj)
+ when list(Element) ->
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,encBytes},",",{curr,encLen},"} = "],EncObj).
+
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj)
+ when list(Element) ->
+ IndDeep = indent(Indent),
+
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- Type#type.tag],
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ emit(IndDeep),
+ emit(Assign),
+ gen_optormand_case(OptOrMand,Erules,TopType,Cname,Type,InnerType,WhatKind,
+ Element),
+ case {Type,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+% #type{constraint=[{tableconstraint_info,RefedFieldName}],
+% def={typefield,_}} ->
+ {#type{def=#'ObjectClassFieldType'{type={typefield,_},
+ fieldname=RefedFieldName}},
+ {componentrelation,_,_}} ->
+ {_LeadingAttrName,Fun} = EncObj,
+ case RefedFieldName of
+ {notype,T} ->
+ throw({error,{notype,type_from_object,T}});
+ {Name,RestFieldNames} when atom(Name) ->
+ case OptOrMand of
+ mandatory -> ok;
+ _ ->
+% emit(["{",{curr,tmpBytes},",",{curr,tmpLen},
+ emit(["{",{curr,tmpBytes},", _} = "])
+%% asn1ct_name:new(tmpBytes),
+%% asn1ct_name:new(tmpLen)
+ end,
+ emit({Fun,"(",{asis,Name},", ",Element,", [], ",
+ {asis,RestFieldNames},"),",nl}),
+ emit(IndDeep),
+ case OptOrMand of
+ mandatory ->
+ emit({"{",{curr,encBytes},", ",{curr,encLen},"} = "}),
+ emit({"?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},")"});
+ _ ->
+% emit({"{",{next,tmpBytes},", _} = "}),
+ emit({"{",{next,tmpBytes},", ",{curr,tmpLen},
+ "} = "}),
+ emit({"?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},"),",nl}),
+ emit(IndDeep),
+ emit({"{",{next,tmpBytes},", ",{curr,tmpLen},"}"})
+ end;
+ _ ->
+ throw({asn1,{'internal error'}})
+ end;
+% #type{constraint=[{tableconstraint_info,_}],
+% def={objectfield,PrimFieldName1,PFNList}} ->
+ {{#'ObjectClassFieldType'{type={objectfield,PrimFieldName1,
+ PFNList}},_},
+ {componentrelation,_,_}} ->
+ %% this is when the dotted list in the FieldName has more
+ %% than one element
+ {_LeadingAttrName,Fun} = EncObj,
+ emit({"?RT_BER:encode_open_type(",Fun,"(",{asis,PrimFieldName1},
+ ", ",Element,", ",{asis,PFNList},"),",{asis,Tag},")"});
+ _ ->
+ case WhatKind of
+ {primitive,bif} ->
+ EncType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{
+ type={fixedtypevaluefield,
+ _,Btype}} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ asn1ct_gen_ber:gen_encode_prim(ber,EncType,{asis,Tag},
+ Element);
+ {notype,_} ->
+ emit({"'enc_",InnerType,"'(",Element,", ",{asis,Tag},")"});
+ 'ASN1_OPEN_TYPE' ->
+ asn1ct_gen_ber:gen_encode_prim(ber,Type#type{def='ASN1_OPEN_TYPE'},{asis,Tag},Element);
+ _ ->
+ {EncFunName, _, _} =
+ mkfuncname(TopType,Cname,WhatKind,enc),
+ case {WhatKind,Type#type.tablecinf,EncObj} of
+ {{constructed,bif},[{objfun,_}|_R],{_,Fun}} ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},
+ ", ",Fun,")"]);
+ _ ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},")"])
+ end
+ end
+ end,
+ case OptOrMand of
+ mandatory -> true;
+ _ ->
+ emit({nl,indent(7),"end"})
+ end.
+
+
+
+gen_optormand_case(mandatory,_,_,_,_,_,_, _) ->
+ ok;
+gen_optormand_case('OPTIONAL',Erules,_,_,_,_,_,Element) ->
+ emit({" case ",Element," of",nl}),
+ emit({indent(9),"asn1_NOVALUE -> {",
+ empty_lb(Erules),",0};",nl}),
+ emit({indent(9),"_ ->",nl,indent(12)});
+gen_optormand_case({'DEFAULT',DefaultValue},Erules,TopType,Cname,Type,
+ InnerType,WhatKind,Element) ->
+ CurrMod = get(currmod),
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit(" case catch "),
+ asn1ct_gen:gen_check_call(TopType,Cname,Type,InnerType,
+ WhatKind,{asis,DefaultValue},
+ Element),
+ emit({" of",nl}),
+ emit({indent(12),"true -> {[],0};",nl});
+ _ ->
+ emit({" case ",Element," of",nl}),
+ emit({indent(9),"asn1_DEFAULT -> {",
+ empty_lb(Erules),
+ ",0};",nl}),
+ case DefaultValue of
+ #'Externalvaluereference'{module=CurrMod,
+ value=V} ->
+ emit({indent(9),"?",{asis,V}," -> {",
+ empty_lb(Erules),",0};",nl});
+ _ ->
+ emit({indent(9),{asis,
+ DefaultValue}," -> {",
+ empty_lb(Erules),",0};",nl})
+ end
+ end,
+ emit({indent(9),"_ ->",nl,indent(12)}).
+
+
+
+
+gen_dec_line_sof(_Erules,TopType,Cname,Type,ObjFun) ->
+
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- Type#type.tag],
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ case WhatKind of
+ {primitive,bif} ->
+ asn1ct_name:delete(len),
+
+ asn1ct_name:new(len),
+ emit(["fun(FBytes,_,_)->",nl]),
+ EncType = case Type#type.def of
+ #'ObjectClassFieldType'{
+ type={fixedtypevaluefield,
+ _,Btype}} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ asn1ct_gen_ber:gen_dec_prim(ber,EncType,"FBytes",Tag,
+ [],no_length,?PRIMITIVE,
+ mandatory),
+ emit([nl,"end, []"]);
+ _ ->
+ case ObjFun of
+ [] ->
+ {DecFunName, _, _} =
+ mkfunname(TopType,Cname,WhatKind,dec,3),
+ emit([DecFunName,", ",{asis,Tag}]);
+ _ ->
+ {DecFunName, _, _} =
+ mkfunname(TopType,Cname,WhatKind,dec,4),
+ emit([DecFunName,", ",{asis,Tag},", ObjFun"])
+ end
+ end.
+
+
+gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- Type#type.tag],
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+ PostpDec =
+ case OptOrMand of
+ mandatory ->
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,mandatory,", mandatory, ",
+ DecObjInf,OptOrMand);
+ _ -> %optional or default
+ case {CTags,Erules} of
+ {[CTag],ber_bin} ->
+ emit(["case ",{curr,bytes}," of",nl]),
+ emit([match_tag(Erules,CTag)," ->",nl]),
+ PostponedDec =
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,mandatory,
+ ", opt_or_default, ",DecObjInf,
+ OptOrMand),
+ emit([";",nl]),
+ emit(["_ ->",nl]),
+ case OptOrMand of
+ {'DEFAULT', Def} ->
+ emit(["{",{asis,Def},",",
+ BytesVar,", 0 }",nl]);
+ 'OPTIONAL' ->
+ emit(["{ asn1_NOVALUE, ",
+ BytesVar,", 0 }",nl])
+ end,
+ emit("end"),
+ PostponedDec;
+ _ ->
+ emit("case (catch "),
+ PostponedDec =
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,OptOrMand,
+ ", opt_or_default, ",DecObjInf,
+ OptOrMand),
+ emit([") of",nl]),
+ case OptOrMand of
+ {'DEFAULT', Def} ->
+ emit(["{'EXIT',{error,{asn1,{no_optional_tag,_}}}}",
+ " -> {",{asis,Def},",",
+ BytesVar,", 0 };",nl]);
+ 'OPTIONAL' ->
+ emit(["{'EXIT',{error,{asn1,{no_optional_tag,_}}}}",
+ " -> { asn1_NOVALUE, ",
+ BytesVar,", 0 };",nl])
+ end,
+ asn1ct_name:new(casetmp),
+ emit([{curr,casetmp},"-> ",{curr,casetmp},nl,"end"]),
+ PostponedDec
+ end
+ end,
+ case DecObjInf of
+	{Cname,ObjSet} -> % this must be the component where an object is
+	    %% chosen from the object set according to the table
+ %% constraint.
+ {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
+ PostpDec};
+ _ -> {[],PostpDec}
+ end.
+
+
+gen_dec_call({typefield,_},Erules,_,_,Type,_,Tag,_,_,false,_) ->
+ %% this in case of a choice with typefield components
+ asn1ct_name:new(reason),
+ {FirstPFName,RestPFName} =
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ emit([nl,indent(6),"begin",nl]),
+ emit([indent(9),"{OpenDec,TmpRest,TmpRbCho} =",nl,indent(12),
+ "?RT_BER:decode_open_type(",Erules,",",{curr,bytes},",",
+ {asis,Tag},"),",nl]),
+ emit([indent(9),"case (catch ObjFun(",{asis,FirstPFName},
+ ", OpenDec, [], ",{asis,RestPFName},
+ ")) of", nl]),%% ??? What about Tag
+ emit([indent(12),"{'EXIT',",{curr,reason},"} ->",nl]),
+%% emit({indent(15),"throw({runtime_error,{'Type not ",
+%% "compatible with tableconstraint', OpenDec}});",nl}),
+ emit([indent(15),"exit({'Type not ",
+ "compatible with table constraint', ",{curr,reason},"});",nl]),
+ emit([indent(12),"{TmpDec,_ ,_} ->",nl]),
+ emit([indent(15),"{TmpDec, TmpRest, TmpRbCho}",nl]),
+ emit([indent(9),"end",nl,indent(6),"end",nl]),
+ [];
+gen_dec_call({typefield,_},_Erules,_,Cname,Type,_BytesVar,Tag,_,_,
+ _DecObjInf,OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",{curr,bytes},",",{asis,Tag},")"]),
+ RefedFieldName =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ [{Cname,RefedFieldName,
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+% asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),[],OptOrMandComp}];
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call({objectfield,PrimFieldName,PFNList},_Erules,_,Cname,_,_,Tag,_,_,_,
+ OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",{curr,bytes},",",{asis,Tag},")"]),
+ [{Cname,{PrimFieldName,PFNList},
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+% asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),[],OptOrMandComp}];
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call(InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,PrimOptOrMand,
+ OptOrMand,DecObjInf,_) ->
+ WhatKind = asn1ct_gen:type(InnerType),
+ gen_dec_call1(WhatKind,InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,
+ PrimOptOrMand,OptOrMand),
+ case DecObjInf of
+ {Cname,{_,OSet,UniqueFName,ValIndex}} ->
+ Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ ValueMatch = value_match(ValIndex,Term),
+ emit({",",nl,"ObjFun = 'getdec_",OSet,"'(",
+% {asis,UniqueFName},", ",{curr,term},")"});
+ {asis,UniqueFName},", ",ValueMatch,")"});
+ _ ->
+ ok
+ end,
+ [].
+gen_dec_call1({primitive,bif},InnerType,Erules,_,_,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ case InnerType of
+ {fixedtypevaluefield,_,Btype} ->
+ asn1ct_gen_ber:gen_dec_prim(Erules,Btype,BytesVar,Tag,[],no_length,
+ ?PRIMITIVE,OptOrMand);
+ _ ->
+ asn1ct_gen_ber:gen_dec_prim(Erules,Type,BytesVar,Tag,[],no_length,
+ ?PRIMITIVE,OptOrMand)
+ end;
+gen_dec_call1('ASN1_OPEN_TYPE',_InnerType,Erules,_,_,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ asn1ct_gen_ber:gen_dec_prim(Erules,Type#type{def='ASN1_OPEN_TYPE'},
+ BytesVar,Tag,[],no_length,
+ ?PRIMITIVE,OptOrMand);
+gen_dec_call1(WhatKind,_,_Erules,TopType,Cname,Type,_,Tag,_,OptOrMand) ->
+ {DecFunName,_,_} =
+ mkfuncname(TopType,Cname,WhatKind,dec),
+ case {WhatKind,Type#type.tablecinf} of
+ {{constructed,bif},[{objfun,_}|_R]} ->
+ emit({DecFunName,"(",{curr,bytes},OptOrMand,{asis,Tag},", ObjFun)"});
+ _ ->
+ emit({DecFunName,"(",{curr,bytes},OptOrMand,{asis,Tag},")"})
+ end.
+
+
+%%------------------------------------------------------
+%% General and special help functions (not exported)
+%%------------------------------------------------------
+
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+mkvlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ "
+ emit([{var,H},Sep]),
+ mkvlist([T1|T], Sep);
+mkvlist([H|T], Sep) ->
+ emit([{var,H}]),
+ mkvlist(T, Sep);
+mkvlist([], _) ->
+ true.
+
+mkvlist(L) ->
+ mkvlist(L,", ").
+
+mkvplus(L) ->
+ mkvlist(L," + ").
+
+extensible(CompList) when list(CompList) ->
+ noext;
+extensible({RootList,ExtList}) ->
+ {ext,length(RootList)+1,length(ExtList)}.
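extensible/1 distinguishes a plain component list from a {RootComponents,ExtensionComponents} pair; in the latter case it records where the extension additions start and how many there are. A hypothetical test, not part of the patch:

    extensible_test() ->
        noext       = extensible([comp1, comp2]),
        {ext, 2, 2} = extensible({[root1], [ext1, ext2]}),
        ok.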
+
+
+print_attribute_comment(InnerType,Pos,Prop) ->
+ CommentLine = "%%-------------------------------------------------",
+ emit([nl,CommentLine]),
+ case InnerType of
+ {typereference,_,Name} ->
+ emit([nl,"%% attribute number ",Pos," with type ",Name]);
+ {'Externaltypereference',_,XModule,Name} ->
+ emit([nl,"%% attribute number ",Pos," External ",XModule,":",Name]);
+ _ ->
+ emit([nl,"%% attribute number ",Pos," with type ",InnerType])
+ end,
+ case Prop of
+ mandatory ->
+ continue;
+ {'DEFAULT', Def} ->
+ emit([" DEFAULT = ",{asis,Def}]);
+ 'OPTIONAL' ->
+ emit([" OPTIONAL"])
+ end,
+ emit([nl,CommentLine,nl]).
+
+
+mkfuncname(TopType,Cname,WhatKind,DecOrEnc) ->
+ CurrMod = get(currmod),
+ case WhatKind of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ F = lists:concat(["'",DecOrEnc,"_",EType,"'"]),
+ {F, "?MODULE", F};
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ {lists:concat(["'",Mod,"':'",DecOrEnc,"_",EType,"'"]),Mod,
+ lists:concat(["'",DecOrEnc,"_",EType,"'"])};
+ {constructed,bif} ->
+ F = lists:concat(["'",DecOrEnc,"_",asn1ct_gen:list2name([Cname|TopType]),"'"]),
+ {F, "?MODULE", F}
+ end.
+
+mkfunname(TopType,Cname,WhatKind,DecOrEnc,Arity) ->
+ CurrMod = get(currmod),
+ case WhatKind of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ F = lists:concat(["fun '",DecOrEnc,"_",EType,"'/",Arity]),
+ {F, "?MODULE", F};
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ {lists:concat(["{'",Mod,"','",DecOrEnc,"_",EType,"'}"]),Mod,
+ lists:concat(["'",DecOrEnc,"_",EType,"'"])};
+ {constructed,bif} ->
+ F =
+ lists:concat(["fun '",DecOrEnc,"_",
+ asn1ct_gen:list2name([Cname|TopType]),"'/",
+ Arity]),
+ {F, "?MODULE", F}
+ end.
+
+empty_lb(ber) ->
+ "[]";
+empty_lb(ber_bin) ->
+ "<<>>".
+
+rtmod(ber) ->
+ list_to_atom(?RT_BER);
+rtmod(ber_bin) ->
+ list_to_atom(?RT_BER_BIN).
+
+indefend_match(ber,used_var) ->
+ "[0,0|R]";
+indefend_match(ber,unused_var) ->
+ "[0,0|_R]";
+indefend_match(ber_bin,used_var) ->
+ "<<0,0,R/binary>>";
+indefend_match(ber_bin,unused_var) ->
+ "<<0,0,_R/binary>>".
+
+notice_value_match() ->
+ Module = get(currmod),
+ put(value_match,{true,Module}).
+
+value_match(Index,Value) when atom(Value) ->
+ value_match(Index,atom_to_list(Value));
+value_match([],Value) ->
+ Value;
+value_match([{VI,_Cname}|VIs],Value) ->
+ value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
+value_match1(Value,[],Acc,Depth) ->
+ Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
+value_match1(Value,[{VI,_Cname}|VIs],Acc,Depth) ->
+ value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
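value_match/2 turns a value-index path into nested element/2 selectors, with the first index outermost. A hypothetical test of that behaviour, not part of the patch:

    value_match_test() ->
        "element(2,Val)"            = value_match([{2, id}], "Val"),
        "element(2,element(3,Val))" = value_match([{2, id}, {3, type}], "Val"),
        ok.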
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
new file mode 100644
index 0000000000..0684ffa084
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
@@ -0,0 +1,1357 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_constructed_ber_bin_v2.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_constructed_ber_bin_v2).
+
+-export([gen_encode_sequence/3]).
+-export([gen_decode_sequence/3]).
+-export([gen_encode_set/3]).
+-export([gen_decode_set/3]).
+-export([gen_encode_sof/4]).
+-export([gen_decode_sof/4]).
+-export([gen_encode_choice/3]).
+-export([gen_decode_choice/3]).
+
+
+-include("asn1_records.hrl").
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+-import(asn1ct_constructed_ber,[match_tag/2]).
+
+-define(ASN1CT_GEN_BER,asn1ct_gen_ber_bin_v2).
+
+% the encoding of the class of the tag, bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE (and SET)
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+
+ %% if EXTERNAL type the input value must be transformed to
+ %% ASN1 1990 format
+ ValName =
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([indent(4),
+ "NewVal = asn1rt_check:transform_to_EXTERNAL1990(Val),",
+ nl]),
+ "NewVal";
+ _ ->
+ "Val"
+ end,
+
+ {SeqOrSet,TableConsInfo,CompList} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {'SEQUENCE',TCI,CL};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {'SET',TCI,CL}
+ end,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+
+%% don't match recordname for now, because of compatibility reasons
+%% emit(["{'",asn1ct_gen:list2rname(Typename),"'"]),
+ emit(["{_"]),
+ case length(CompList1) of
+ 0 ->
+ true;
+ CompListLen ->
+ emit([","]),
+ mkcindexlist([Tc || Tc <- lists:seq(1,CompListLen)])
+ end,
+ emit(["} = ",ValName,",",nl]),
+ EncObj =
+ case TableConsInfo of
+ #simpletableattributes{usedclassfield=Used,
+ uniqueclassfield=Unique} when Used /= Unique ->
+ false;
+ %% ObjectSet, name of the object set in constraints
+ %%
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex} -> %% N is index of attribute that determines constraint
+ OSDef =
+ case ObjectSet of
+ {Module,OSName} ->
+ asn1_db:dbget(Module,OSName);
+ OSName ->
+ asn1_db:dbget(get(currmod),OSName)
+ end,
+% io:format("currmod: ~p~nOSName: ~p~nAttrN: ~p~nN: ~p~nUniqueFieldName: ~p~n",
+% [get(currmod),OSName,AttrN,N,UniqueFieldName]),
+ case (OSDef#typedef.typespec)#'ObjectSet'.gen of
+ true ->
+ ObjectEncode =
+ asn1ct_gen:un_hyphen_var(lists:concat(['Obj',
+ AttrN])),
+ emit([ObjectEncode," = ",nl]),
+ emit([" 'getenc_",ObjectSet,"'(",{asis,UniqueFieldName},
+ ", ",nl]),
+ ValueMatch = value_match(ValueIndex,
+ lists:concat(["Cindex",N])),
+ emit([indent(35),ValueMatch,"),",nl]),
+ {AttrN,ObjectEncode};
+ _ ->
+ false
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ %% when the simpletableattributes was at an outer
+ %% level and the objfun has been passed through the
+ %% function call
+ {"got objfun through args","ObjFun"};
+ _ ->
+ false
+ end
+ end,
+
+ gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj),
+
+ emit([nl," BytesSoFar = "]),
+ case SeqOrSet of
+ 'SET' when (D#type.def)#'SET'.sorted == dynamic ->
+ emit("?RT_BER:dynamicsort_SET_components(["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["]),",nl]);
+ _ ->
+ emit("["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["],",nl])
+ end,
+ emit("LenSoFar = "),
+ case asn1ct_name:all(encLen) of
+ [] -> emit("0");
+ AllLengths ->
+ mkvplus(AllLengths)
+ end,
+ emit([",",nl]),
+ emit(["?RT_BER:encode_tags(TagIn, BytesSoFar, LenSoFar)."
+ ,nl]).
+
+gen_decode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(tag),
+ #'SEQUENCE'{tablecinf=TableConsInfo,components=CList} = D#type.def,
+ Ext = extensible(CList),
+ CompList = case CList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CList
+ end,
+
+ emit([" %%-------------------------------------------------",nl]),
+ emit([" %% decode tag and length ",nl]),
+ emit([" %%-------------------------------------------------",nl]),
+
+ asn1ct_name:new(tlv),
+ case CompList of
+ EmptyCL when EmptyCL == [];EmptyCL == {[],[]}-> % empty sequence
+ true;
+ _ ->
+ emit([{curr,tlv}," = "])
+ end,
+ emit(["?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(tlv),
+ asn1ct_name:new(v),
+
+ {DecObjInf,UniqueFName,ValueIndex} =
+ case TableConsInfo of
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValIndex} ->
+% {ObjectSet,AttrN,_N,UniqueFieldName} ->%% N is index of attribute that determines constraint
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+		true -> % when a component relation constraint establishes
+		     %% a relation from one component to another
+		     %% component's subtype component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName,ValIndex}},
+ UniqueFieldName,ValIndex};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName,ValIndex}
+ end;
+ _ ->
+% case D#type.tablecinf of
+% [{objfun,_}|_] ->
+% {{"got objfun through args","ObjFun"},false,false};
+% _ ->
+ {false,false,false}
+% end
+ end,
+ case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
+ no_terms -> % an empty sequence
+ emit([nl,nl]),
+ demit(["Result = "]), %dbg
+ %% return value as record
+ asn1ct_name:new(rb),
+ emit([" {'",asn1ct_gen:list2rname(Typename),"'}.",nl,nl]);
+ {LeadingAttrTerm,PostponedDecArgs} ->
+ emit([com,nl,nl]),
+ case {LeadingAttrTerm,PostponedDecArgs} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],PostponedDecArgs} ->
+ DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
+ ValueMatch = value_match(ValueIndex,Term),
+ emit([DecObj," =",nl," 'getdec_",ObjSet,"'(",
+ {asis,UniqueFName},", ",ValueMatch,"),",nl]),
+ gen_dec_postponed_decs(DecObj,PostponedDecArgs)
+ end,
+ demit(["Result = "]), %dbg
+ %% return value as record
+ case Ext of
+ {ext,_,_} ->
+ emit(["case ",{prev,tlv}," of [] -> true; _ -> true end, % ... extra fields skipped",nl]);
+ noext ->
+ emit(["case ",{prev,tlv}," of",nl,
+ "[] -> true;",
+ "_ -> exit({error,{asn1, {unexpected,",{prev,tlv},
+ "}}}) % extra fields not allowed",nl,
+ "end,",nl])
+ end,
+ asn1ct_name:new(rb),
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([" OldFormat={'",asn1ct_gen:list2rname(Typename),
+ "', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["},",nl]),
+ emit([" asn1rt_check:transform_to_EXTERNAL1994",
+ "(OldFormat).",nl]);
+ _ ->
+ emit([" {'",asn1ct_gen:list2rname(Typename),"', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["}.",nl,nl])
+ end
+ end.
+
+gen_dec_postponed_decs(_,[]) ->
+ emit(nl);
+gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term,
+ TmpTerm,_Tag,OptOrMand}|Rest]) ->
+
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+ asn1ct_name:new(tmptlv),
+
+ emit([Term," = ",nl]),
+ N = case OptOrMand of
+ mandatory -> 0;
+ 'OPTIONAL' ->
+ emit_opt_or_mand_check(asn1_NOVALUE,TmpTerm),
+ 6;
+ {'DEFAULT',Val} ->
+ emit_opt_or_mand_check(Val,TmpTerm),
+ 6
+ end,
+ emit([indent(N+3),"case (catch ",DecObj,"(",{asis,FirstPFN},
+ ", ",TmpTerm,", ",{asis,PFNList},")) of",nl]),
+ emit([indent(N+6),"{'EXIT', ",{curr,reason},"} ->",nl]),
+ emit([indent(N+9),"exit({'Type not compatible with table constraint',",
+ {curr,reason},"});",nl]),
+ emit([indent(N+6),{curr,tmpterm}," ->",nl]),
+ emit([indent(N+9),{curr,tmpterm},nl]),
+
+ case OptOrMand of
+ mandatory -> emit([indent(N+3),"end,",nl]);
+ _ ->
+ emit([indent(N+3),"end",nl,
+ indent(3),"end,",nl])
+ end,
+ gen_dec_postponed_decs(DecObj,Rest).
+
+emit_opt_or_mand_check(Value,TmpTerm) ->
+ emit([indent(3),"case ",TmpTerm," of",nl,
+ indent(6),{asis,Value}," ->",{asis,Value},";",nl,
+ indent(6),"_ ->",nl]).
+
+%%============================================================================
+%% Encode/decode SET
+%%
+%%============================================================================
+
+gen_encode_set(Erules,Typename,D) when record(D,type) ->
+ gen_encode_sequence(Erules,Typename,D).
+
+gen_decode_set(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(tag),
+ #'SET'{tablecinf=TableConsInfo,components=TCompList} = D#type.def,
+ Ext = extensible(TCompList),
+ CompList = case TCompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> TCompList
+ end,
+
+ asn1ct_name:clear(),
+ asn1ct_name:new(tlv),
+ case CompList of
+ EmptyCL when EmptyCL == [];EmptyCL == {[],[]}-> % empty sequence
+ true;
+ _ ->
+ emit([{curr,tlv}," = "])
+ end,
+ emit(["?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(v),
+
+
+ {DecObjInf,UniqueFName} =
+ case TableConsInfo of
+ {ObjectSet,AttrN,_N,UniqueFieldName} ->%% N is index of attribute that determines constraint
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+ true -> % when a component relation constraint establishes a
+ %% relation from a component to another component's
+ %% subtype component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName}},
+ UniqueFieldName};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName}
+ end;
+ _ ->
+ {false,false}
+ end,
+
+ case CompList of
+ [] -> % empty set
+ true;
+ _ ->
+ emit(["SetFun = fun(FunTlv) ->", nl]),
+ emit(["case FunTlv of ",nl]),
+ NextNum = gen_dec_set_cases(Erules,Typename,CompList,1),
+ emit([indent(6), {curr,else}," -> ",nl,
+ indent(9),"{",NextNum,", ",{curr,else},"}",nl]),
+ emit([indent(3),"end",nl]),
+ emit([indent(3),"end,",nl]),
+
+ emit(["PositionList = [SetFun(TempTlv)|| TempTlv <- ",{curr,tlv},"],",nl]),
+ asn1ct_name:new(tlv),
+ emit([{curr,tlv}," = [Stlv || {_,Stlv} <- lists:sort(PositionList)],",nl]),
+ asn1ct_name:new(tlv)
+
+ end,
+ case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
+ no_terms -> % an empty sequence
+ emit([nl,nl]),
+ demit(["Result = "]), %dbg
+ %% return value as record
+ emit([" {'",asn1ct_gen:list2rname(Typename),"'}.",nl]);
+ {LeadingAttrTerm,PostponedDecArgs} ->
+ emit([com,nl,nl]),
+ case {LeadingAttrTerm,PostponedDecArgs} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],PostponedDecArgs} ->
+ DecObj = lists:concat(['DecObj',LeadingAttr,Term]),
+ emit([DecObj," =",nl," 'getdec_",ObjSet,"'(",
+ {asis,UniqueFName},", ",Term,"),",nl]),
+ gen_dec_postponed_decs(DecObj,PostponedDecArgs)
+ end,
+ demit(["Result = "]), %dbg
+ %% return value as record
+ case Ext of
+ {ext,_,_} ->
+ emit(["case ",{prev,tlv}," of [] -> true; _ -> true end, % ... extra fields skipped",nl]);
+ noext ->
+ emit(["case ",{prev,tlv}," of",nl,
+ "[] -> true;",
+ "_ -> exit({error,{asn1, {unexpected,",{prev,tlv},
+ "}}}) % extra fields not allowed",nl,
+ "end,",nl])
+ end,
+ emit([" {'",asn1ct_gen:list2rname(Typename),"', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["}.",nl])
+ end.
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE OF and SET OF
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sof(Erules,Typename,_InnerTypename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, Cont} = D#type.def,
+
+ Objfun = case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+
+ emit([" {EncBytes,EncLen} = 'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(Val",Objfun,",[],0),",nl]),
+
+ emit([" ?RT_BER:encode_tags(TagIn, EncBytes, EncLen).",nl,nl]),
+
+ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont).
+
+
+gen_decode_sof(Erules,TypeName,_InnerTypeName,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, _TypeTag, Cont} =
+ case D#type.def of
+ {'SET OF',_Cont} -> {'SET OF','SET',_Cont};
+ {'SEQUENCE OF',_Cont} -> {'SEQUENCE OF','SEQUENCE',_Cont}
+ end,
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+
+ emit([" %%-------------------------------------------------",nl]),
+ emit([" %% decode tag and length ",nl]),
+ emit([" %%-------------------------------------------------",nl]),
+
+ asn1ct_name:new(tlv),
+ emit([{curr,tlv},
+ " = ?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(v),
+
+ emit(["["]),
+
+ InnerType = asn1ct_gen:get_inner(Cont#type.def),
+ ContName = case asn1ct_gen:type(InnerType) of
+ Atom when atom(Atom) -> Atom;
+ _ -> TypeNameSuffix
+ end,
+%% fix me
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ []
+ end,
+ gen_dec_line(Erules,TypeName,ContName,[],Cont,mandatory,ObjFun),
+ %% gen_dec_line_sof(Erules,Typename,ContName,Cont,ObjFun),
+ emit([" || ",{curr,v}," <- ",{curr,tlv},"].",nl,nl,nl]).
+
+
+gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
+ when record(Cont,type)->
+
+ {Objfun,Objfun_novar,EncObj} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _",{no_attr,"ObjFun"}};
+ _ ->
+ {"","",false}
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([]",Objfun_novar,", AccBytes, AccLen) -> ",nl]),
+
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit([indent(3),
+ "{?RT_BER:dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]);
+ _ ->
+ emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]),
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+ gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3,
+ mandatory,"{EncBytes,EncLen} = ",EncObj),
+ emit([",",nl]),
+ emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(T",Objfun,","]),
+ emit(["[EncBytes|AccBytes], AccLen + EncLen).",nl,nl]).
+
+%%============================================================================
+%% Encode/decode CHOICE
+%%
+%%============================================================================
+
+gen_encode_choice(Erules,Typename,D) when record(D,type) ->
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_enc_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit([nl,nl]).
+
+gen_decode_choice(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(bytes),
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_dec_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit([".",nl]).
+
+
+%%============================================================================
+%% Encode SEQUENCE
+%%
+%%============================================================================
+
+gen_enc_sequence_call(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],Pos,Ext,EncObj) ->
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Element =
+ case TopType of
+ ['EXTERNAL'] ->
+ io_lib:format("Cindex~w",[Pos]);
+ _ ->
+ io_lib:format("Cindex~w",[Pos])
+ end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ print_attribute_comment(InnerType,Pos,Cname,Prop),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,3,Prop,EncObj),
+ emit([com,nl]),
+ gen_enc_sequence_call(Erules,TopType,Rest,Pos+1,Ext,EncObj);
+
+gen_enc_sequence_call(_Erules,_TopType,[],_Num,_,_) ->
+ true.
+
+%%============================================================================
+%% Decode SEQUENCE
+%%
+%%============================================================================
+
+gen_dec_sequence_call(Erules,TopType,CompList,Ext,DecObjInf) ->
+ gen_dec_sequence_call1(Erules,TopType, CompList, 1, Ext,DecObjInf,[],[]).
+
+
+gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop,tags=Tags}|Rest],Num,Ext,DecObjInf,LeadingAttrAcc,ArgsAcc) ->
+ {LA,PostponedDec} =
+ gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
+ Ext,DecObjInf),
+ case Rest of
+ [] ->
+ {LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
+ _ ->
+ emit([com,nl]),
+ asn1ct_name:new(bytes),
+ gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
+ LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
+ end;
+
+gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
+ no_terms.
+
+
+%%----------------------------
+%%SEQUENCE mandatory
+%%----------------------------
+
+gen_dec_component(Erules,TopType,Cname,CTags,Type,Pos,Prop,Ext,DecObjInf) ->
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} -> OCFTType;
+ _ -> asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ Prop1 = case {Prop,Ext} of
+ {mandatory,{ext,Epos,_}} when Pos >= Epos ->
+ 'OPTIONAL';
+ _ ->
+ Prop
+ end,
+ print_attribute_comment(InnerType,Pos,Cname,Prop1),
+ asn1ct_name:new(term),
+ emit_term_tlv(Prop1,InnerType,DecObjInf),
+ asn1ct_name:new(rb),
+ PostponedDec =
+ gen_dec_line(Erules,TopType,Cname,CTags,Type,Prop1,DecObjInf),
+ asn1ct_name:new(v),
+ asn1ct_name:new(tlv),
+ asn1ct_name:new(form),
+ PostponedDec.
+
+
+emit_term_tlv({'DEFAULT',_},InnerType,DecObjInf) ->
+ emit_term_tlv(opt_or_def,InnerType,DecObjInf);
+emit_term_tlv('OPTIONAL',InnerType,DecObjInf) ->
+ emit_term_tlv(opt_or_def,InnerType,DecObjInf);
+emit_term_tlv(Prop,{typefield,_},DecObjInf) ->
+ emit_term_tlv(Prop,type_or_object_field,DecObjInf);
+emit_term_tlv(Prop,{objectfield,_,_},DecObjInf) ->
+ emit_term_tlv(Prop,type_or_object_field,DecObjInf);
+emit_term_tlv(opt_or_def,type_or_object_field,_) ->
+ asn1ct_name:new(tmpterm),
+ emit(["{",{curr,tmpterm},",",{curr,tlv},"} = "]);
+emit_term_tlv(opt_or_def,_,_) ->
+ emit(["{",{curr,term},",",{curr,tlv},"} = "]);
+emit_term_tlv(_,type_or_object_field,false) ->
+ emit(["[",{curr,v},"|",{curr,tlv},"] = ",{prev,tlv},", ",nl,
+ {curr,term}," = "]);
+emit_term_tlv(_,type_or_object_field,_) ->
+ asn1ct_name:new(tmpterm),
+ emit(["[",{curr,v},"|",{curr,tlv},"] = ",{prev,tlv},", ",nl]),
+ emit([nl," ",{curr,tmpterm}," = "]);
+emit_term_tlv(mandatory,_,_) ->
+ emit(["[",{curr,v},"|",{curr,tlv},"] = ",{prev,tlv},", ",nl,
+ {curr,term}," = "]).
+
+
+gen_dec_set_cases(_Erules,_TopType,[],Pos) ->
+ Pos;
+gen_dec_set_cases(Erules,TopType,[Comp|RestComps],Pos) ->
+ Name = Comp#'ComponentType'.name,
+ Type = Comp#'ComponentType'.typespec,
+ CTags = Comp#'ComponentType'.tags,
+
+ emit([indent(6),"%",Name,nl]),
+ Tags = case Type#type.tag of
+ [] -> % this is a choice without explicit tag
+ [(?ASN1CT_GEN_BER:decode_class(T1class) bsl 10) + T1number||
+ {T1class,T1number} <- CTags];
+ [FirstTag|_] ->
+ [(?ASN1CT_GEN_BER:decode_class(FirstTag#tag.class) bsl 10) + FirstTag#tag.number]
+ end,
+% emit([indent(6),"%Tags: ",Tags,nl]),
+% emit([indent(6),"%Type#type.tag: ",Type#type.tag,nl]),
+ CaseFun = fun(TagList=[H|T],Fun,N) ->
+ Semicolon = case TagList of
+ [_Tag1,_|_] -> [";",nl];
+ _ -> ""
+ end,
+ emit(["TTlv = {",H,",_} ->",nl]),
+ emit([indent(4),"{",Pos,", TTlv}",Semicolon]),
+ Fun(T,Fun,N+1);
+ ([],_,0) ->
+ true;
+ ([],_,_) ->
+ emit([";",nl])
+ end,
+ CaseFun(Tags,CaseFun,0),
+%% emit([";",nl]),
+ gen_dec_set_cases(Erules,TopType,RestComps,Pos+1).
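+
+%% For illustration: gen_dec_set_cases/4 (and the choice/line generators
+%% below) match tags as single integers of the form
+%% (decode_class(Class) bsl 10) + Number. Assuming ?ASN1CT_GEN_BER's
+%% decode_class/1 returns the usual BER class octets (0, 16#40, 16#80,
+%% 16#C0), a context-specific tag number 2 is matched as
+%% (16#80 bsl 10) + 2 = 131074.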
+
+
+
+%%---------------------------------------------
+%% Encode CHOICE
+%%---------------------------------------------
+%% for BER we currently do care (a little) whether the choice has an EXTENSIONMARKER
+
+
+gen_enc_choice(Erules,TopType,Tag,CompList,_Ext) ->
+ gen_enc_choice1(Erules,TopType,Tag,CompList,_Ext).
+
+gen_enc_choice1(Erules,TopType,_Tag,CompList,_Ext) ->
+ asn1ct_name:clear(),
+ emit([" {EncBytes,EncLen} = case element(1,Val) of",nl]),
+ gen_enc_choice2(Erules,TopType,CompList),
+ emit([nl," end,",nl,nl]),
+
+ emit(["?RT_BER:encode_tags(TagIn, EncBytes, EncLen).",nl]).
+
+
+gen_enc_choice2(Erules,TopType,[H1|T]) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ emit([" ",{asis,Cname}," ->",nl]),
+ {Encobj,Assign} =
+ case {Type#type.def,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+ {#'ObjectClassFieldType'{},{componentrelation,_,_}} ->
+ asn1ct_name:new(tmpBytes),
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Emit = ["{",{curr,tmpBytes},", _} = "],
+ {{no_attr,"ObjFun"},Emit};
+ _ ->
+ {false,[]}
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% {false,[]};
+% _ ->
+% asn1ct_name:new(tmpBytes),
+% asn1ct_name:new(encBytes),
+% asn1ct_name:new(encLen),
+% Emit = ["{",{curr,tmpBytes},", _} = "],
+% {{no_attr,"ObjFun"},Emit}
+% end,
+ gen_enc_line(Erules,TopType,Cname,Type,"element(2,Val)",9,
+ mandatory,Assign,Encobj),
+ case Encobj of
+ false -> ok;
+ _ ->
+ emit([",",nl,indent(9),"{",{curr,encBytes},", ",
+ {curr,encLen},"}"])
+ end,
+ emit([";",nl]),
+ case T of
+ [] ->
+ emit([indent(6), "Else -> ",nl,
+ indent(9),"exit({error,{asn1,{invalid_choice_type,Else}}})"]);
+ _ ->
+ true
+ end,
+ gen_enc_choice2(Erules,TopType,T);
+
+gen_enc_choice2(_Erules,_TopType,[]) ->
+ true.
+
+
+
+
+%%--------------------------------------------
+%% Decode CHOICE
+%%--------------------------------------------
+
+gen_dec_choice(Erules,TopType, _ChTag, CompList, Ext) ->
+ asn1ct_name:clear(),
+ asn1ct_name:new(tlv),
+ emit([{curr,tlv},
+ " = ?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(tlv),
+ asn1ct_name:new(v),
+ emit(["case (case ",{prev,tlv},
+ " of [Ctemp",{prev,tlv},"] -> Ctemp",{prev,tlv},
+ "; _ -> ",{prev,tlv}," end)"," of",nl]),
+ asn1ct_name:new(tagList),
+ asn1ct_name:new(choTags),
+ asn1ct_name:new(res),
+ gen_dec_choice_cases(Erules,TopType,CompList),
+ emit([indent(6), {curr,else}," -> ",nl]),
+ case Ext of
+ noext ->
+ emit([indent(9),"exit({error,{asn1,{invalid_choice_tag,",
+ {curr,else},"}}})",nl]);
+ _ ->
+ emit([indent(9),"{asn1_ExtAlt, ?RT_BER:encode(",{curr,else},")}",nl])
+ end,
+ emit([indent(3),"end",nl]),
+ asn1ct_name:new(tag),
+ asn1ct_name:new(else).
+
+
+gen_dec_choice_cases(_Erules,_TopType, []) ->
+ ok;
+gen_dec_choice_cases(Erules,TopType, [H|T]) ->
+ Cname = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+ Prop = H#'ComponentType'.prop,
+ Tags = Type#type.tag,
+ Fcases = fun([{T1class,T1number}|Tail],Fun) ->
+ emit([indent(4),{curr,v}," = {",
+ (?ASN1CT_GEN_BER:decode_class(T1class) bsl 10) +
+ T1number,",_} -> ",nl]),
+ emit([indent(8),"{",{asis,Cname},", "]),
+ gen_dec_line(Erules,TopType,Cname,[],Type,Prop,false),
+ emit(["};",nl,nl]),
+ Fun(Tail,Fun);
+ ([],_) ->
+ ok
+ end,
+ emit([nl,"%% '",Cname,"'",nl]),
+ case {Tags,asn1ct:get_gen_state_field(namelist)} of
+ {[],_} -> % choice without explicit tags
+ Fcases(H#'ComponentType'.tags,Fcases);
+ {[FirstT|_RestT],[{Cname,undecoded}|Names]} ->
+ DecTag=(?ASN1CT_GEN_BER:decode_class(FirstT#tag.class) bsl 10) +
+ FirstT#tag.number,
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ [DecTag],Type}),
+ asn1ct:update_gen_state(namelist,Names),
+ emit([indent(4),{curr,res}," = ",
+ match_tag(ber_bin,{FirstT#tag.class,FirstT#tag.number}),
+ " -> ",nl]),
+ emit([indent(8),"{",{asis,Cname},", {'",
+ asn1ct_gen:list2name([Cname|TopType]),"',",
+ {curr,res},"}};",nl,nl]);
+ {[FirstT|RestT],_} ->
+ emit([indent(4),"{",
+ (?ASN1CT_GEN_BER:decode_class(FirstT#tag.class) bsl 10) +
+ FirstT#tag.number,", ",{curr,v},"} -> ",nl]),
+ emit([indent(8),"{",{asis,Cname},", "]),
+ gen_dec_line(Erules,TopType,Cname,[],Type#type{tag=RestT},Prop,false),
+ emit(["};",nl,nl])
+ end,
+ gen_dec_choice_cases(Erules,TopType, T).
+
+
+
+%%---------------------------------------
+%% Generate the encode/decode code
+%%---------------------------------------
+
+gen_enc_line(Erules,TopType,Cname,
+ Type=#type{constraint=[{componentrelation,_,_}],
+ def=#'ObjectClassFieldType'{type={typefield,_}}},
+ Element,Indent,OptOrMand=mandatory,EncObj)
+ when list(Element) ->
+ asn1ct_name:new(tmpBytes),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,tmpBytes},",_} = "],EncObj);
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,EncObj)
+ when list(Element) ->
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,encBytes},",",{curr,encLen},"} = "],EncObj).
+
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj)
+ when list(Element) ->
+ IndDeep = indent(Indent),
+ Tag = lists:reverse([?ASN1CT_GEN_BER:encode_tag_val(
+ ?ASN1CT_GEN_BER:decode_class(X#tag.class),
+ X#tag.form,
+ X#tag.number)
+ || X <- Type#type.tag]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ emit(IndDeep),
+ emit(Assign),
+ gen_optormand_case(OptOrMand,Erules,TopType,Cname,Type,InnerType,WhatKind,
+ Element),
+ case {Type,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+% #type{constraint=[{tableconstraint_info,RefedFieldName}],
+% def={typefield,_}} ->
+ {#type{def=#'ObjectClassFieldType'{type={typefield,_},
+ fieldname=RefedFieldName}},
+ {componentrelation,_,_}} ->
+ {_LeadingAttrName,Fun} = EncObj,
+ case RefedFieldName of
+ {notype,T} ->
+ throw({error,{notype,type_from_object,T}});
+ {Name,RestFieldNames} when atom(Name) ->
+ case OptOrMand of
+ mandatory -> ok;
+ _ ->
+% emit(["{",{curr,tmpBytes},",",{curr,tmpLen},
+ emit(["{",{curr,tmpBytes},",_ } = "])
+% "} = "])
+ end,
+ emit([Fun,"(",{asis,Name},", ",Element,", ",
+ {asis,RestFieldNames},"),",nl]),
+ emit(IndDeep),
+ case OptOrMand of
+ mandatory ->
+ emit(["{",{curr,encBytes},",",{curr,encLen},
+ "} = "]),
+ emit(["?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},")"]);
+ _ ->
+% emit(["{",{next,tmpBytes},", _} = "]),
+ emit(["{",{next,tmpBytes},",",{curr,tmpLen},
+ "} = "]),
+ emit(["?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},"),",nl]),
+ emit(IndDeep),
+ emit(["{",{next,tmpBytes},", ",{curr,tmpLen},"}"])
+ end;
+ _ ->
+ throw({asn1,{'internal error'}})
+ end;
+ {{#'ObjectClassFieldType'{type={objectfield,PrimFieldName1,
+ PFNList}},_},
+ {componentrelation,_,_}} ->
+ %% this is when the dotted list in the FieldName has more
+ %% than one element
+ {_LeadingAttrName,Fun} = EncObj,
+ emit(["?RT_BER:encode_open_type(",Fun,"(",{asis,PrimFieldName1},
+ ", ",Element,", ",{asis,PFNList},"))"]);
+ _ ->
+ case WhatKind of
+ {primitive,bif} ->
+ EncType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Btype}} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ ?ASN1CT_GEN_BER:gen_encode_prim(ber,EncType,{asis,Tag},
+ Element);
+ {notype,_} ->
+ emit(["'enc_",InnerType,"'(",Element,", ",{asis,Tag},")"]);
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.def of
+ #'ObjectClassFieldType'{} -> %Open Type
+ ?ASN1CT_GEN_BER:gen_encode_prim(ber,#type{def='ASN1_OPEN_TYPE'},{asis,Tag},Element);
+ _ ->
+ ?ASN1CT_GEN_BER:gen_encode_prim(ber,Type,
+ {asis,Tag},
+ Element)
+ end;
+ _ ->
+ {EncFunName, _EncMod, _EncFun} =
+ mkfuncname(TopType,Cname,WhatKind,"enc_"),
+ case {WhatKind,Type#type.tablecinf,EncObj} of
+ {{constructed,bif},[{objfun,_}|_R],{_,Fun}} ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},
+ ", ",Fun,")"]);
+ _ ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},")"])
+ end
+ end
+ end,
+ case OptOrMand of
+ mandatory -> true;
+ _ ->
+ emit([nl,indent(7),"end"])
+ end.
+
+gen_optormand_case(mandatory,_Erules,_TopType,_Cname,_Type,_InnerType,_WhatKind,
+ _Element) ->
+ ok;
+gen_optormand_case('OPTIONAL',Erules,_TopType,_Cname,_Type,_InnerType,_WhatKind,
+ Element) ->
+ emit([" case ",Element," of",nl]),
+ emit([indent(9),"asn1_NOVALUE -> {",
+ empty_lb(Erules),",0};",nl]),
+ emit([indent(9),"_ ->",nl,indent(12)]);
+gen_optormand_case({'DEFAULT',DefaultValue},Erules,TopType,Cname,Type,
+ InnerType,WhatKind,Element) ->
+ CurrMod = get(currmod),
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit(" case catch "),
+ asn1ct_gen:gen_check_call(TopType,Cname,Type,InnerType,
+ WhatKind,{asis,DefaultValue},
+ Element),
+ emit([" of",nl]),
+ emit([indent(12),"true -> {[],0};",nl]);
+ _ ->
+ emit([" case ",Element," of",nl]),
+ emit([indent(9),"asn1_DEFAULT -> {",
+ empty_lb(Erules),
+ ",0};",nl]),
+ case DefaultValue of
+ #'Externalvaluereference'{module=CurrMod,
+ value=V} ->
+ emit([indent(9),"?",{asis,V}," -> {",
+ empty_lb(Erules),",0};",nl]);
+ _ ->
+ emit([indent(9),{asis,
+ DefaultValue}," -> {",
+ empty_lb(Erules),",0};",nl])
+ end
+ end,
+ emit([indent(9),"_ ->",nl,indent(12)]).
+
+
+
+gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(v)),
+ Tag =
+ [(?ASN1CT_GEN_BER:decode_class(X#tag.class) bsl 10) + X#tag.number ||
+ X <- Type#type.tag],
+ ChoiceTags =
+ [(?ASN1CT_GEN_BER:decode_class(Class) bsl 10) + Number||
+ {Class,Number} <- CTags],
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+ PostpDec =
+ case OptOrMand of
+ mandatory ->
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,
+ mandatory,", mandatory, ",DecObjInf,OptOrMand);
+ _ -> % optional or default, or a mandatory component after an extension marker
+ {FirstTag,RestTag} =
+ case Tag of
+ [] ->
+ {ChoiceTags,[]};
+ [Ft|Rt] ->
+ {Ft,Rt}
+ end,
+ emit(["case ",{prev,tlv}," of",nl]),
+ PostponedDec =
+ case Tag of
+ [] when length(ChoiceTags) > 0 -> % a choice without explicit tag
+ Fcases =
+ fun(FirstTag1) ->
+ emit(["[",{curr,v}," = {",{asis,FirstTag1},
+ ",_}|Temp",
+ {curr,tlv},
+ "] ->",nl]),
+ emit([indent(4),"{"]),
+ Pdec=
+ gen_dec_call(InnerType,Erules,
+ TopType,Cname,Type,
+ BytesVar,RestTag,
+ mandatory,
+ ", mandatory, ",
+ DecObjInf,OptOrMand),
+
+ emit([", Temp",{curr,tlv},"}"]),
+ emit([";",nl]),
+ Pdec
+ end,
+ hd([Fcases(TmpTag)|| TmpTag <- FirstTag]);
+
+ [] -> % an open type without explicit tag
+ emit(["[",{curr,v},"|Temp",{curr,tlv},"] ->",nl]),
+ emit([indent(4),"{"]),
+ Pdec=
+ gen_dec_call(InnerType,Erules,TopType,Cname,
+ Type,BytesVar,RestTag,mandatory,
+ ", mandatory, ",DecObjInf,
+ OptOrMand),
+
+ emit([", Temp",{curr,tlv},"}"]),
+ emit([";",nl]),
+ Pdec;
+
+ _ ->
+ emit(["[{",{asis,FirstTag},
+ ",",{curr,v},"}|Temp",
+ {curr,tlv},
+ "] ->",nl]),
+ emit([indent(4),"{"]),
+ Pdec=
+ gen_dec_call(InnerType,Erules,TopType,Cname,
+ Type,BytesVar,RestTag,mandatory,
+ ", mandatory, ",DecObjInf,
+ OptOrMand),
+
+ emit([", Temp",{curr,tlv},"}"]),
+ emit([";",nl]),
+ Pdec
+ end,
+
+ emit([indent(4),"_ ->",nl]),
+ case OptOrMand of
+ {'DEFAULT', Def} ->
+ emit([indent(8),"{",{asis,Def},",",{prev,tlv},"}",nl]);
+ 'OPTIONAL' ->
+ emit([indent(8),"{ asn1_NOVALUE, ",{prev,tlv},"}",nl])
+ end,
+ emit(["end"]),
+ PostponedDec
+ end,
+ case DecObjInf of
+ {Cname,ObjSet} -> % this must be the component where an object is
+ %% chosen from the object set according to the table
+ %% constraint.
+ {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
+ PostpDec};
+ _ -> {[],PostpDec}
+ end.
+
+gen_dec_call({typefield,_},_,_,_Cname,Type,BytesVar,Tag,_,_,false,_) ->
+ %% this handles the case of a choice with typefield components
+ asn1ct_name:new(reason),
+ asn1ct_name:new(opendec),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(tmptlv),
+
+ {FirstPFName,RestPFName} =
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ emit([nl,indent(6),"begin",nl]),
+% emit([indent(9),{curr,opendec}," = ?RT_BER:decode_open_type(",
+ emit([indent(9),{curr,tmptlv}," = ?RT_BER:decode_open_type(",
+ BytesVar,",",{asis,Tag},"),",nl]),
+% emit([indent(9),"{",{curr,tmptlv},",_} = ?RT_BER:decode(",
+% {curr,opendec},"),",nl]),
+
+ emit([indent(9),"case (catch ObjFun(",{asis,FirstPFName},
+ ", ",{curr,tmptlv},", ",{asis,RestPFName},
+ ")) of", nl]),%% ??? What about Tag
+ emit([indent(12),"{'EXIT',",{curr,reason},"} ->",nl]),
+ emit([indent(15),"exit({'Type not ",
+ "compatible with table constraint', ",{curr,reason},"});",nl]),
+ emit([indent(12),{curr,tmpterm}," ->",nl]),
+ emit([indent(15),{curr,tmpterm},nl]),
+ emit([indent(9),"end",nl,indent(6),"end",nl]),
+ [];
+gen_dec_call({typefield,_},_,_,Cname,Type,BytesVar,Tag,_,_,_DecObjInf,OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",BytesVar,",",{asis,Tag},")"]),
+ RefedFieldName =
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ [{Cname,RefedFieldName,asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call({objectfield,PrimFieldName,PFNList},_,_,Cname,_,BytesVar,Tag,_,_,_,OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",BytesVar,",",{asis,Tag},")"]),
+ [{Cname,{PrimFieldName,PFNList},asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call(InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,PrimOptOrMand,
+ OptOrMand,DecObjInf,_) ->
+ WhatKind = asn1ct_gen:type(InnerType),
+ gen_dec_call1(WhatKind,InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,
+ PrimOptOrMand,OptOrMand),
+ case DecObjInf of
+ {Cname,{_,OSet,UniqueFName,ValIndex}} ->
+ Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ ValueMatch = value_match(ValIndex,Term),
+ emit([",",nl,"ObjFun = 'getdec_",OSet,"'(",
+% {asis,UniqueFName},", ",{curr,term},")"]);
+ {asis,UniqueFName},", ",ValueMatch,")"]);
+ _ ->
+ ok
+ end,
+ [].
+gen_dec_call1({primitive,bif},InnerType,Erules,TopType,Cname,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ case {asn1ct:get_gen_state_field(namelist),InnerType} of
+ {[{Cname,undecoded}|Rest],_} ->
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ Tag,Type}),
+ asn1ct:update_gen_state(namelist,Rest),
+% emit(["?RT_BER:match_tags(",BytesVar,",",{asis,Tag},")"]);
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',",
+ BytesVar,"}"]);
+ {_,{fixedtypevaluefield,_,Btype}} ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,Btype,BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand);
+ _ ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,Type,BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand)
+ end;
+gen_dec_call1('ASN1_OPEN_TYPE',_InnerType,Erules,TopType,Cname,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ case {asn1ct:get_gen_state_field(namelist),Type#type.def} of
+ {[{Cname,undecoded}|Rest],_} ->
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ Tag,Type}),
+ asn1ct:update_gen_state(namelist,Rest),
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',",
+ BytesVar,"}"]);
+% emit(["?RT_BER:match_tags(",BytesVar,",",{asis,Tag},")"]);
+ {_,#'ObjectClassFieldType'{type=OpenType}} ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,#type{def=OpenType},
+ BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand);
+ _ ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,Type,BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand)
+ end;
+gen_dec_call1(WhatKind,_,_Erules,TopType,Cname,Type,BytesVar,
+ Tag,_,_OptOrMand) ->
+ case asn1ct:get_gen_state_field(namelist) of
+ [{Cname,undecoded}|Rest] ->
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ Tag,Type}),
+ asn1ct:update_gen_state(namelist,Rest),
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',",
+ BytesVar,"}"]);
+ _ ->
+% {DecFunName, _DecMod, _DecFun} =
+% case {asn1ct:get_gen_state_field(namelist),WhatKind} of
+ EmitDecFunCall =
+ fun(FuncName) ->
+ case {WhatKind,Type#type.tablecinf} of
+ {{constructed,bif},[{objfun,_}|_Rest]} ->
+ emit([FuncName,"(",BytesVar,", ",{asis,Tag},
+ ", ObjFun)"]);
+ _ ->
+ emit([FuncName,"(",BytesVar,", ",{asis,Tag},")"])
+ end
+ end,
+ case asn1ct:get_gen_state_field(namelist) of
+ [{Cname,List}|Rest] when list(List) ->
+ case WhatKind of
+ #'Externaltypereference'{} ->
+ %%io:format("gen_dec_call1 1:~n~p~n~n",[WhatKind]),
+ asn1ct:add_tobe_refed_func({WhatKind,List});
+ _ ->
+ %%io:format("gen_dec_call1 2:~n~p~n~n",[[Cname|TopType]]),
+ asn1ct:add_tobe_refed_func({[Cname|TopType],
+ List})
+ end,
+ asn1ct:update_gen_state(namelist,Rest),
+ Prefix=asn1ct:get_gen_state_field(prefix),
+ {DecFunName,_,_}=
+ mkfuncname(TopType,Cname,WhatKind,Prefix),
+ EmitDecFunCall(DecFunName);
+ [{Cname,parts}|Rest] ->
+ asn1ct:update_gen_state(namelist,Rest),
+ asn1ct:get_gen_state_field(prefix),
+ %% This is to prepare a SEQUENCE OF value in
+ %% partial incomplete decode for a later
+ %% part-decode, i.e. skip the tag.
+ asn1ct:add_generated_refed_func({[Cname|TopType],
+ parts,
+ [],Type}),
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',"]),
+ EmitDecFunCall("?RT_BER:match_tags"),
+ emit("}");
+ _ ->
+ {DecFunName,_,_}=
+ mkfuncname(TopType,Cname,WhatKind,"dec_"),
+ EmitDecFunCall(DecFunName)
+ end
+% case {WhatKind,Type#type.tablecinf} of
+% {{constructed,bif},[{objfun,_}|_Rest]} ->
+% emit([DecFunName,"(",BytesVar,", ",{asis,Tag},
+% ", ObjFun)"]);
+% _ ->
+% emit([DecFunName,"(",BytesVar,", ",{asis,Tag},")"])
+% end
+ end.
+
+
+%%------------------------------------------------------
+%% General and special help functions (not exported)
+%%------------------------------------------------------
+
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+mkcindexlist([H,T1|T], Sep) -> % Sep is a string, e.g. ", " or "+ "
+ emit(["Cindex",H,Sep]),
+ mkcindexlist([T1|T], Sep);
+mkcindexlist([H|T], Sep) ->
+ emit(["Cindex",H]),
+ mkcindexlist(T, Sep);
+mkcindexlist([], _) ->
+ true.
+
+mkcindexlist(L) ->
+ mkcindexlist(L,", ").
+
+
+mkvlist([H,T1|T], Sep) -> % Sep is a string, e.g. ", " or "+ "
+ emit([{var,H},Sep]),
+ mkvlist([T1|T], Sep);
+mkvlist([H|T], Sep) ->
+ emit([{var,H}]),
+ mkvlist(T, Sep);
+mkvlist([], _) ->
+ true.
+
+mkvlist(L) ->
+ mkvlist(L,", ").
+
+mkvplus(L) ->
+ mkvlist(L," + ").
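+
+%% For illustration: mkcindexlist([1,2,3]) emits "Cindex1, Cindex2, Cindex3",
+%% while mkvlist/1 and mkvplus/1 emit the current variable names joined by
+%% ", " and " + " respectively; all three are thin wrappers around the
+%% two-argument versions above.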
+
+extensible(CompList) when list(CompList) ->
+ noext;
+extensible({RootList,ExtList}) ->
+ {ext,length(RootList)+1,length(ExtList)}.
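+
+%% For illustration: extensible/1 returns noext for a plain component list
+%% and {ext,Pos,NumExt} for a {RootList,ExtList} pair, where Pos is the
+%% position of the first extension addition; e.g. a root list of two
+%% components with one extension gives {ext,3,1}.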
+
+
+print_attribute_comment(InnerType,Pos,Cname,Prop) ->
+ CommentLine = "%%-------------------------------------------------",
+ emit([nl,CommentLine]),
+ case InnerType of
+ {typereference,_,Name} ->
+ emit([nl,"%% attribute ",Cname,"(",Pos,") with type ",Name]);
+ {'Externaltypereference',_,XModule,Name} ->
+ emit([nl,"%% attribute ",Cname,"(",Pos,") External ",XModule,":",Name]);
+ _ ->
+ emit([nl,"%% attribute ",Cname,"(",Pos,") with type ",InnerType])
+ end,
+ case Prop of
+ mandatory ->
+ continue;
+ {'DEFAULT', Def} ->
+ emit([" DEFAULT = ",{asis,Def}]);
+ 'OPTIONAL' ->
+ emit([" OPTIONAL"])
+ end,
+ emit([nl,CommentLine,nl]).
+
+
+
+mkfuncname(TopType,Cname,WhatKind,Prefix) ->
+ CurrMod = get(currmod),
+ case WhatKind of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ F = lists:concat(["'",Prefix,EType,"'"]),
+ {F, "?MODULE", F};
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ {lists:concat(["'",Mod,"':'",Prefix,EType,"'"]),Mod,
+ lists:concat(["'",Prefix,EType,"'"])};
+ {constructed,bif} ->
+ F = lists:concat(["'",Prefix,asn1ct_gen:list2name([Cname|TopType]),"'"]),
+ {F, "?MODULE", F}
+ end.
+
+empty_lb(ber) ->
+ "[]";
+empty_lb(ber_bin) ->
+ "<<>>";
+empty_lb(ber_bin_v2) ->
+ "<<>>".
+
+value_match(Index,Value) when atom(Value) ->
+ value_match(Index,atom_to_list(Value));
+value_match([],Value) ->
+ Value;
+value_match([{VI,_}|VIs],Value) ->
+ value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
+value_match1(Value,[],Acc,Depth) ->
+ Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
+value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
+ value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
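+
+%% For illustration: value_match/2 builds a nested element/2 access as a
+%% string; e.g. value_match([{2,x},{3,y}],"Val") produces
+%% "element(2,element(3,Val))", and an atom value is first converted with
+%% atom_to_list/1.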
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_per.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_per.erl
new file mode 100644
index 0000000000..9b4e0063bb
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_constructed_per.erl
@@ -0,0 +1,1235 @@
+% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_constructed_per.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_constructed_per).
+
+-export([gen_encode_sequence/3]).
+-export([gen_decode_sequence/3]).
+-export([gen_encode_set/3]).
+-export([gen_decode_set/3]).
+-export([gen_encode_sof/4]).
+-export([gen_decode_sof/4]).
+-export([gen_encode_choice/3]).
+-export([gen_decode_choice/3]).
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+
+%% ENCODE GENERATOR FOR SEQUENCE TYPE ** **********
+
+
+gen_encode_set(Erules,TypeName,D) ->
+ gen_encode_constructed(Erules,TypeName,D).
+
+gen_encode_sequence(Erules,TypeName,D) ->
+ gen_encode_constructed(Erules,TypeName,D).
+
+gen_encode_constructed(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+ {CompList,TableConsInfo} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {CL,TCI};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {CL,TCI}
+ end,
+ case Typename of
+ ['EXTERNAL'] ->
+ emit({{var,asn1ct_name:next(val)},
+ " = asn1rt_check:transform_to_EXTERNAL1990(",
+ {var,asn1ct_name:curr(val)},"),",nl}),
+ asn1ct_name:new(val);
+ _ ->
+ ok
+ end,
+ case {Optionals = optionals(CompList),CompList} of
+ {[],EmptyCL} when EmptyCL == {[],[]};EmptyCL == [] ->
+ emit(["%%Variable setting just to eliminate ",
+ "compiler warning for unused vars!",nl,
+ "_Val = ",{var,asn1ct_name:curr(val)},",",nl]);
+ {[],_} ->
+ emit([{var,asn1ct_name:next(val)}," = ?RT_PER:list_to_record("]),
+ emit(["'",asn1ct_gen:list2rname(Typename),"'"]),
+ emit([", ",{var,asn1ct_name:curr(val)},"),",nl]);
+ _ ->
+ Fixoptcall =
+ case Erules of
+ per -> ",Opt} = ?RT_PER:fixoptionals2(";
+ _ -> ",Opt} = ?RT_PER:fixoptionals("
+ end,
+ emit({"{",{var,asn1ct_name:next(val)},Fixoptcall,
+ {asis,Optionals},",",length(Optionals),
+ ",",{var,asn1ct_name:curr(val)},"),",nl})
+ end,
+ asn1ct_name:new(val),
+ Ext = extensible(CompList),
+ case Ext of
+ {ext,_,NumExt} when NumExt > 0 ->
+ emit(["Extensions = ?RT_PER:fixextensions(",{asis,Ext},
+ ", ",{curr,val},"),",nl]);
+ _ -> true
+ end,
+ EncObj =
+ case TableConsInfo of
+ #simpletableattributes{usedclassfield=Used,
+ uniqueclassfield=Unique} when Used /= Unique ->
+ false;
+ %% ObjectSet, name of the object set in constraints
+ %%
+ %%{ObjectSet,AttrN,N,UniqueFieldName} -> %% N is index of attribute that determines constraint
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex
+ } -> %% N is index of attribute that determines constraint
+ OSDef =
+ case ObjectSet of
+ {Module,OSName} ->
+ asn1_db:dbget(Module,OSName);
+ OSName ->
+ asn1_db:dbget(get(currmod),OSName)
+ end,
+ case (OSDef#typedef.typespec)#'ObjectSet'.gen of
+ true ->
+ ObjectEncode =
+ asn1ct_gen:un_hyphen_var(lists:concat(['Obj',AttrN])),
+ emit([ObjectEncode," = ",nl]),
+ emit([" 'getenc_",ObjectSet,"'(",
+ {asis,UniqueFieldName},", ",nl]),
+ El = make_element(N+1,asn1ct_gen:mk_var(asn1ct_name:curr(val)),AttrN),
+ Indent = 12 + length(atom_to_list(ObjectSet)),
+ case ValueIndex of
+ [] ->
+ emit([indent(Indent),El,"),",nl]);
+ _ ->
+ emit([indent(Indent),"value_match(",
+ {asis,ValueIndex},",",El,")),",nl]),
+ notice_value_match()
+ end,
+ {AttrN,ObjectEncode};
+ _ ->
+ false
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ %% when the simpletableattributes was at an outer
+ %% level and the objfun has been passed through the
+ %% function call
+ {"got objfun through args","ObjFun"};
+ _ ->
+ false
+ end
+ end,
+ emit({"[",nl}),
+ MaybeComma1 =
+ case Ext of
+ {ext,_Pos,NumExt2} when NumExt2 > 0 ->
+ emit({"?RT_PER:setext(Extensions =/= [])"}),
+ ", ";
+ {ext,_Pos,_} ->
+ emit({"?RT_PER:setext(false)"}),
+ ", ";
+ _ ->
+ ""
+ end,
+ MaybeComma2 =
+ case optionals(CompList) of
+ [] -> MaybeComma1;
+ _ ->
+ emit(MaybeComma1),
+ emit("Opt"),
+ {",",nl}
+ end,
+ gen_enc_components_call(Typename,CompList,MaybeComma2,EncObj,Ext),
+ emit({"].",nl}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% generate decode function for SEQUENCE and SET
+%%
+gen_decode_set(Erules,Typename,D) ->
+ gen_decode_constructed(Erules,Typename,D).
+
+gen_decode_sequence(Erules,Typename,D) ->
+ gen_decode_constructed(Erules,Typename,D).
+
+gen_decode_constructed(_Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {CompList,TableConsInfo} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {CL,TCI};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {CL,TCI}
+ end,
+ Ext = extensible(CompList),
+ MaybeComma1 = case Ext of
+ {ext,_Pos,_NumExt} ->
+ gen_dec_extension_value("Bytes"),
+ {",",nl};
+ _ ->
+ ""
+ end,
+ Optionals = optionals(CompList),
+ MaybeComma2 = case Optionals of
+ [] -> MaybeComma1;
+ _ ->
+ Bcurr = asn1ct_name:curr(bytes),
+ Bnext = asn1ct_name:next(bytes),
+ emit(MaybeComma1),
+ GetoptCall = "} = ?RT_PER:getoptionals2(",
+ emit({"{Opt,",{var,Bnext},GetoptCall,
+ {var,Bcurr},",",{asis,length(Optionals)},")"}),
+ asn1ct_name:new(bytes),
+ ", "
+ end,
+ {DecObjInf,UniqueFName,ValueIndex} =
+ case TableConsInfo of
+%% {ObjectSet,AttrN,N,UniqueFieldName} ->%% N is index of attribute that determines constraint
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValIndex} ->
+%% {AttrN,ObjectSet};
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_R]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+ true -> % when a component relation constraint establishes a
+ %% relation from a component to another component's
+ %% subtype component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName,ValIndex}},
+ UniqueFieldName,ValIndex};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName,ValIndex}
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ {{"got objfun through args","ObjFun"},false,false};
+ _ ->
+ {false,false,false}
+ end
+ end,
+ {AccTerm,AccBytes} =
+ gen_dec_components_call(Typename,CompList,MaybeComma2,DecObjInf,Ext,length(Optionals)),
+ case asn1ct_name:all(term) of
+ [] -> emit(MaybeComma2); % no components at all
+ _ -> emit({com,nl})
+ end,
+ case {AccTerm,AccBytes} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} ->
+ DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
+ ValueMatch = value_match(ValueIndex,Term),
+ emit({DecObj," =",nl," 'getdec_",ObjSet,"'(",
+% {asis,UniqueFName},", ",Term,"),",nl}),
+ {asis,UniqueFName},", ",ValueMatch,"),",nl}),
+ gen_dec_listofopentypes(DecObj,ListOfOpenTypes,false)
+ end,
+ %% we don't return named lists any more Cnames = mkcnamelist(CompList),
+ demit({"Result = "}), %dbg
+ %% return value as record
+ case Typename of
+ ['EXTERNAL'] ->
+ emit({" OldFormat={'",asn1ct_gen:list2rname(Typename),
+ "'"}),
+ mkvlist(asn1ct_name:all(term)),
+ emit({"},",nl}),
+ emit({" ASN11994Format =",nl,
+ " asn1rt_check:transform_to_EXTERNAL1994",
+ "(OldFormat),",nl}),
+ emit(" {ASN11994Format,");
+ _ ->
+ emit(["{{'",asn1ct_gen:list2rname(Typename),"'"]),
+ mkvlist(asn1ct_name:all(term)),
+ emit("},")
+ end,
+ emit({{var,asn1ct_name:curr(bytes)},"}"}),
+ emit({".",nl,nl}).
+
+gen_dec_listofopentypes(_,[],_) ->
+ emit(nl);
+gen_dec_listofopentypes(DecObj,[{_Cname,{FirstPFN,PFNList},Term,TmpTerm,Prop}|Rest],_Update) ->
+
+% asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+
+ emit([Term," = ",nl]),
+
+ N = case Prop of
+ mandatory -> 0;
+ 'OPTIONAL' ->
+ emit_opt_or_mand_check(asn1_NOVALUE,TmpTerm),
+ 6;
+ {'DEFAULT',Val} ->
+ emit_opt_or_mand_check(Val,TmpTerm),
+ 6
+ end,
+
+ emit([indent(N+3),"case (catch ",DecObj,"(",
+ {asis,FirstPFN},", ",TmpTerm,", telltype,",{asis,PFNList},")) of",nl]),
+ emit([indent(N+6),"{'EXIT', ",{curr,reason},"} ->",nl]),
+%% emit({indent(9),"throw({runtime_error,{","'Type not compatible with table constraint'",",",Term,"}});",nl}),
+ emit([indent(N+9),"exit({'Type not compatible with table constraint',",
+ {curr,reason},"});",nl]),
+ emit([indent(N+6),"{",{curr,tmpterm},",_} ->",nl]),
+ emit([indent(N+9),{curr,tmpterm},nl]),
+
+ case Prop of
+ mandatory ->
+ emit([indent(N+3),"end,",nl]);
+ _ ->
+ emit([indent(N+3),"end",nl,
+ indent(3),"end,",nl])
+ end,
+ gen_dec_listofopentypes(DecObj,Rest,true).
+
+
+emit_opt_or_mand_check(Val,Term) ->
+ emit([indent(3),"case ",Term," of",nl,
+ indent(6),{asis,Val}," ->",{asis,Val},";",nl,
+ indent(6),"_ ->",nl]).
+
+%% ENCODE GENERATOR FOR THE CHOICE TYPE *******
+%% assume Val = {Alternative,AltType}
+%% generate
+%%[
+%% ?RT_PER:set_choice(element(1,Val),Altnum,Altlist,ext),
+%%case element(1,Val) of
+%% alt1 ->
+%% encode_alt1(element(2,Val));
+%% alt2 ->
+%% encode_alt2(element(2,Val))
+%%end
+%%].
+
+gen_encode_choice(_Erules,Typename,D) when record(D,type) ->
+ {'CHOICE',CompList} = D#type.def,
+ emit({"[",nl}),
+ Ext = extensible(CompList),
+ gen_enc_choice(Typename,CompList,Ext),
+ emit({nl,"].",nl}).
+
+gen_decode_choice(_Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(bytes),
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ gen_dec_choice(Typename,CompList,Ext),
+ emit({".",nl}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Encode generator for SEQUENCE OF type
+
+
+gen_encode_sof(_Erules,Typename,SeqOrSetOf,D) when record(D,type) ->
+ asn1ct_name:start(),
+% Val = [Component]
+% ?RT_PER:encode_length(length(Val)),
+% lists:
+ {_SeqOrSetOf,ComponentType} = D#type.def,
+ emit({"[",nl}),
+ SizeConstraint =
+ case asn1ct_gen:get_constraint(D#type.constraint,
+ 'SizeConstraint') of
+ no -> undefined;
+ Range -> Range
+ end,
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _->
+ ""
+ end,
+ emit({nl,indent(3),"?RT_PER:encode_length(",
+ {asis,SizeConstraint},
+ ",length(Val)),",nl}),
+ emit({indent(3),"'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(Val",ObjFun,", [])"}),
+ emit({nl,"].",nl}),
+ NewComponentType =
+ case ComponentType#type.def of
+ {'ENUMERATED',_,Component}->
+ ComponentType#type{def={'ENUMERATED',Component}};
+ _ -> ComponentType
+ end,
+ gen_encode_sof_components(Typename,SeqOrSetOf,NewComponentType).
+
+gen_decode_sof(_Erules,Typename,SeqOrSetOf,D) when record(D,type) ->
+ asn1ct_name:start(),
+% Val = [Component]
+% ?RT_PER:encode_length(length(Val)),
+% lists:
+ {_SeqOrSetOf,ComponentType} = D#type.def,
+ SizeConstraint =
+ case asn1ct_gen:get_constraint(D#type.constraint,
+ 'SizeConstraint') of
+ no -> undefined;
+ Range -> Range
+ end,
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({nl,"{Num,Bytes1} = ?RT_PER:decode_length(Bytes,",{asis,SizeConstraint},"),",nl}),
+ emit({"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(Num, Bytes1, telltype",ObjFun,", []).",nl}),
+ NewComponentType =
+ case ComponentType#type.def of
+ {'ENUMERATED',_,Component}->
+ ComponentType#type{def={'ENUMERATED',Component}};
+ _ -> ComponentType
+ end,
+ gen_decode_sof_components(Typename,SeqOrSetOf,NewComponentType).
+
+gen_encode_sof_components(Typename,SeqOrSetOf,Cont) ->
+ {ObjFun,ObjFun_Var} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _"};
+ _ ->
+ {"",""}
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"_components'([]",
+ ObjFun_Var,", Acc) -> lists:reverse(Acc);",nl,nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"_components'([H|T]",
+ ObjFun,", Acc) ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"_components'(T"}),
+ emit({ObjFun,", ["}),
+ %% the component encoder
+ Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
+ Cont#type.def),
+
+ Conttype = asn1ct_gen:get_inner(Cont#type.def),
+ Currmod = get(currmod),
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ case asn1ct_gen:type(Conttype) of
+ {primitive,bif} ->
+ gen_encode_prim_wrapper(Ctgenmod,per,Cont,false,"H");
+% Ctgenmod:gen_encode_prim(per,Cont,false,"H");
+ {constructed,bif} ->
+ NewTypename = [Constructed_Suffix|Typename],
+ emit({"'enc_",asn1ct_gen:list2name(NewTypename),"'(H",
+ ObjFun,")",nl,nl});
+ #'Externaltypereference'{module=Currmod,type=Ename} ->
+ emit({"'enc_",Ename,"'(H)",nl,nl});
+ #'Externaltypereference'{module=EMod,type=EType} ->
+ emit({"'",EMod,"':'enc_",EType,"'(H)",nl,nl});
+ _ ->
+ emit({"'enc_",Conttype,"'(H)",nl,nl})
+ end,
+ emit({" | Acc]).",nl}).
+
+gen_decode_sof_components(Typename,SeqOrSetOf,Cont) ->
+ {ObjFun,ObjFun_Var} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _"};
+ _ ->
+ {"",""}
+ end,
+ emit({"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(0, Bytes, _",ObjFun_Var,", Acc) ->",nl,
+ indent(3),"{lists:reverse(Acc), Bytes};",nl}),
+ emit({"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(Num, Bytes, _",ObjFun,", Acc) ->",nl}),
+ emit({indent(3),"{Term,Remain} = "}),
+ Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
+ Cont#type.def),
+ Conttype = asn1ct_gen:get_inner(Cont#type.def),
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ case asn1ct_gen:type(Conttype) of
+ {primitive,bif} ->
+ Ctgenmod:gen_dec_prim(per,Cont,"Bytes"),
+ emit({com,nl});
+ {constructed,bif} ->
+ NewTypename = [Constructed_Suffix|Typename],
+ emit({"'dec_",asn1ct_gen:list2name(NewTypename),
+ "'(Bytes, telltype",ObjFun,"),",nl});
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(Bytes,telltype),",nl});
+ #'Externaltypereference'{module=EMod,type=EType} ->
+ emit({"'",EMod,"':'dec_",EType,"'(Bytes,telltype),",nl});
+ _ ->
+ emit({"'dec_",Conttype,"'(Bytes,telltype),",nl})
+ end,
+ emit({indent(3),"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(Num-1, Remain, telltype",ObjFun,", [Term|Acc]).",nl}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% General and special help functions (not exported)
+
+mkvlist([H|T]) ->
+ emit(","),
+ mkvlist2([H|T]);
+mkvlist([]) ->
+ true.
+mkvlist2([H,T1|T]) ->
+ emit({{var,H},","}),
+ mkvlist2([T1|T]);
+mkvlist2([H|T]) ->
+ emit({{var,H}}),
+ mkvlist2(T);
+mkvlist2([]) ->
+ true.
+
+extensible(CompList) when list(CompList) ->
+ noext;
+extensible({RootList,ExtList}) ->
+ {ext,length(RootList)+1,length(ExtList)}.
+
+gen_dec_extension_value(_) ->
+ emit({"{Ext,",{next,bytes},"} = ?RT_PER:getext(",{curr,bytes},")"}),
+ asn1ct_name:new(bytes).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Produce a list with positions (in the Value record) where
+%% there are optional components; start at 2 because the first element
+%% is the record name
+
+optionals({L,_Ext}) -> optionals(L,[],2);
+optionals(L) -> optionals(L,[],2).
+
+optionals([{'EXTENSIONMARK',_,_}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos); % optionals in extension are currently not handled
+optionals([#'ComponentType'{prop='OPTIONAL'}|Rest],Acc,Pos) ->
+ optionals(Rest,[Pos|Acc],Pos+1);
+optionals([#'ComponentType'{prop={'DEFAULT',_}}|Rest],Acc,Pos) ->
+ optionals(Rest,[Pos|Acc],Pos+1);
+optionals([#'ComponentType'{}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos+1);
+optionals([],Acc,_) ->
+ lists:reverse(Acc).
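+
+%% For illustration: with component properties
+%% [mandatory, 'OPTIONAL', {'DEFAULT',0}], optionals/1 returns [3,4], i.e.
+%% the tuple positions of the optional and defaulted components; counting
+%% starts at 2 because position 1 of the value record holds the record name.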
+
+
+gen_enc_components_call(TopType,{CompList,ExtList},MaybeComma,DynamicEnc,Ext) ->
+ %% The type has extensionmarker
+ Rpos = gen_enc_components_call1(TopType,CompList,1,MaybeComma,DynamicEnc,noext),
+ case Ext of
+ {ext,_,ExtNum} when ExtNum > 0 ->
+ emit([nl,
+ ",Extensions",nl]);
+ _ -> true
+ end,
+ %handle extensions
+ gen_enc_components_call1(TopType,ExtList,Rpos,MaybeComma,DynamicEnc,Ext);
+gen_enc_components_call(TopType, CompList, MaybeComma, DynamicEnc, Ext) ->
+ %% The type has no extensionmarker
+ gen_enc_components_call1(TopType,CompList,1,MaybeComma,DynamicEnc,Ext).
+
+gen_enc_components_call1(TopType,
+ [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],
+ Tpos,
+ MaybeComma, DynamicEnc, Ext) ->
+
+ put(component_type,{true,C}),
+ %% information necessary in asn1ct_gen_per_rt2ct:gen_encode_prim
+
+ Pos = case Ext of
+ noext -> Tpos;
+ {ext,Epos,_Enum} -> Tpos - Epos + 1
+ end,
+ emit(MaybeComma),
+ case Prop of
+ 'OPTIONAL' ->
+ gen_enc_component_optional(TopType,Cname,Type,Tpos,DynamicEnc,Ext);
+ {'DEFAULT',_DefVal} ->
+ gen_enc_component_default(TopType,Cname,Type,Tpos,DynamicEnc,Ext);
+ _ ->
+ case Ext of
+ {ext,ExtPos,_} when Tpos >= ExtPos ->
+ gen_enc_component_optional(TopType,Cname,Type,Tpos,DynamicEnc,Ext);
+ _ ->
+ gen_enc_component_mandatory(TopType,Cname,Type,Tpos,DynamicEnc,Ext)
+ end
+ end,
+
+ erase(component_type),
+
+ case Rest of
+ [] ->
+ Pos+1;
+ _ ->
+ emit({com,nl}),
+ gen_enc_components_call1(TopType,Rest,Tpos+1,"",DynamicEnc,Ext)
+ end;
+gen_enc_components_call1(_TopType,[],Pos,_,_,_) ->
+ Pos.
+
+gen_enc_component_default(TopType,Cname,Type,Pos,DynamicEnc,Ext) ->
+% Element = io_lib:format("?RT_PER:cindex(~w,Val1,~w)",[Pos+1,Cname]),
+ Element = make_element(Pos+1,"Val1",Cname),
+ emit({"case ",Element," of",nl}),
+% case Ext of
+% {ext,ExtPos,_} when Pos >= ExtPos ->
+% emit({"asn1_NOEXTVALUE -> [];",nl});
+% _ ->
+ emit({"asn1_DEFAULT -> [];",nl}),
+% end,
+ asn1ct_name:new(tmpval),
+ emit({{curr,tmpval}," ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ emit({nl,"%% attribute number ",Pos," with type ",
+ InnerType,nl}),
+ NextElement = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ gen_enc_line(TopType,Cname,Type,NextElement, Pos,DynamicEnc,Ext),
+ emit({nl,"end"}).
+
+gen_enc_component_optional(TopType,Cname,Type,Pos,DynamicEnc,Ext) ->
+% Element = io_lib:format("?RT_PER:cindex(~w,Val1,~w)",[Pos+1,Cname]),
+ Element = make_element(Pos+1,"Val1",Cname),
+ emit({"case ",Element," of",nl}),
+% case Ext of
+% {ext,ExtPos,_} when Pos >= ExtPos ->
+% emit({"asn1_NOEXTVALUE -> [];",nl});
+% _ ->
+ emit({"asn1_NOVALUE -> [];",nl}),
+% end,
+ asn1ct_name:new(tmpval),
+ emit({{curr,tmpval}," ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ emit({nl,"%% attribute number ",Pos," with type ",
+ InnerType,nl}),
+ NextElement = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ gen_enc_line(TopType,Cname,Type,NextElement, Pos,DynamicEnc,Ext),
+ emit({nl,"end"}).
+
+gen_enc_component_mandatory(TopType,Cname,Type,Pos,DynamicEnc,Ext) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ emit({nl,"%% attribute number ",Pos," with type ",
+ InnerType,nl}),
+ gen_enc_line(TopType,Cname,Type,[],Pos,DynamicEnc,Ext).
+
+gen_enc_line(TopType, Cname, Type, [], Pos,DynamicEnc,Ext) ->
+% Element = io_lib:format("?RT_PER:cindex(~w,~s,~w)",[Pos+1,asn1ct_gen:mk_var(asn1ct_name:curr(val)),Cname]),
+ Element = make_element(Pos+1,asn1ct_gen:mk_var(asn1ct_name:curr(val)),Cname),
+ gen_enc_line(TopType,Cname,Type,Element, Pos,DynamicEnc,Ext);
+gen_enc_line(TopType,Cname,Type,Element, Pos,DynamicEnc,Ext) ->
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ Atype =
+ case Type of
+ #type{def=#'ObjectClassFieldType'{type=InnerType}} ->
+ InnerType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ case Ext of
+ {ext,Ep1,_} when Pos >= Ep1 ->
+ emit(["?RT_PER:encode_open_type(dummy,?RT_PER:complete("]);
+ _ -> true
+ end,
+ case Atype of
+ {typefield,_} ->
+ case DynamicEnc of
+ {_LeadingAttrName,Fun} ->
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% componentrelation) of
+ case (Type#type.def)#'ObjectClassFieldType'.fieldname of
+ {notype,T} ->
+ throw({error,{notype,type_from_object,T}});
+ {Name,RestFieldNames} when atom(Name) ->
+ emit({"?RT_PER:encode_open_type([],?RT_PER:complete(",nl}),
+ emit({" ",Fun,"(",{asis,Name},", ",
+ Element,", ",{asis,RestFieldNames},")))"});
+ Other ->
+ throw({asn1,{'internal error',Other}})
+ end
+ end;
+ {objectfield,PrimFieldName1,PFNList} ->
+ case DynamicEnc of
+ {_LeadingAttrName,Fun} ->
+ emit({"?RT_PER:encode_open_type([],"
+ "?RT_PER:complete(",nl}),
+ emit({" ",Fun,"(",{asis,PrimFieldName1},
+ ", ",Element,", ",{asis,PFNList},")))"})
+ end;
+ _ ->
+ CurrMod = get(currmod),
+ case asn1ct_gen:type(Atype) of
+ #'Externaltypereference'{module=Mod,type=EType} when
+ (CurrMod==Mod) ->
+ emit({"'enc_",EType,"'(",Element,")"});
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ emit({"'",Mod,"':'enc_",
+ EType,"'(",Element,")"});
+ #typereference{val=Ename} ->
+ emit({"'enc_",Ename,"'(",Element,")"});
+ {notype,_} ->
+ emit({"'enc_",Atype,"'(",Element,")"});
+ {primitive,bif} ->
+ EncType =
+ case Atype of
+ {fixedtypevaluefield,_,Btype} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ gen_encode_prim_wrapper(Ctgenmod,per,EncType,
+ false,Element);
+% Ctgenmod:gen_encode_prim(per,EncType,
+% false,Element);
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OpenType} ->
+ gen_encode_prim_wrapper(Ctgenmod,per,
+ #type{def=OpenType},
+ false,Element);
+ _ ->
+ gen_encode_prim_wrapper(Ctgenmod,per,Type,
+ false,Element)
+ end;
+% Ctgenmod:gen_encode_prim(per,Type,
+% false,Element);
+ {constructed,bif} ->
+ NewTypename = [Cname|TopType],
+ case {Type#type.tablecinf,DynamicEnc} of
+ {[{objfun,_}|_R],{_,EncFun}} ->
+%% emit({"?RT_PER:encode_open_type([],",
+%% "?RT_PER:complete(",nl}),
+ emit({"'enc_",
+ asn1ct_gen:list2name(NewTypename),
+ "'(",Element,", ",EncFun,")"});
+ _ ->
+ emit({"'enc_",
+ asn1ct_gen:list2name(NewTypename),
+ "'(",Element,")"})
+ end
+ end
+ end,
+ case Ext of
+ {ext,Ep2,_} when Pos >= Ep2 ->
+ emit(["))"]);
+ _ -> true
+ end.
+
+gen_dec_components_call(TopType,{CompList,ExtList},MaybeComma,DecInfObj,Ext,NumberOfOptionals) ->
+ %% The type has extensionmarker
+ {Rpos,AccTerm,AccBytes} =
+ gen_dec_components_call1(TopType, CompList, 1, 1, MaybeComma,DecInfObj,
+ noext,[],[],NumberOfOptionals),
+ emit([",",nl,"{Extensions,",{next,bytes},"} = "]),
+ emit(["?RT_PER:getextension(Ext,",{curr,bytes},"),",nl]),
+ asn1ct_name:new(bytes),
+ {_Epos,AccTermE,AccBytesE} =
+ gen_dec_components_call1(TopType,ExtList,Rpos, 1, "",DecInfObj,Ext,[],[],NumberOfOptionals),
+ case ExtList of
+ [] -> true;
+ _ -> emit([",",nl])
+ end,
+ emit([{next,bytes},"= ?RT_PER:skipextensions(",{curr,bytes},",",
+ length(ExtList)+1,",Extensions)",nl]),
+ asn1ct_name:new(bytes),
+ {AccTerm++AccTermE,AccBytes++AccBytesE};
+
+gen_dec_components_call(TopType,CompList,MaybeComma,DecInfObj,Ext,NumberOfOptionals) ->
+ %% The type has no extensionmarker
+ {_,AccTerm,AccBytes} =
+ gen_dec_components_call1(TopType, CompList, 1, 1,MaybeComma,DecInfObj,Ext,[],[],NumberOfOptionals),
+ {AccTerm,AccBytes}.
+
+
+gen_dec_components_call1(TopType,
+ [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],
+ Tpos,OptPos,MaybeComma,DecInfObj,Ext,AccTerm,AccBytes,NumberOfOptionals) ->
+ Pos = case Ext of
+ noext -> Tpos;
+ {ext,Epos,_Enum} -> Tpos - Epos + 1
+ end,
+ emit(MaybeComma),
+%% asn1ct_name:new(term),
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=InType} ->
+ InType;
+ Def ->
+ asn1ct_gen:get_inner(Def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ case InnerType of
+ #'Externaltypereference'{type=T} ->
+ emit({nl,"%% attribute number ",Tpos," with type ",
+ T,nl});
+ IT when tuple(IT) ->
+ emit({nl,"%% attribute number ",Tpos," with type ",
+ element(2,IT),nl});
+ _ ->
+ emit({nl,"%% attribute number ",Tpos," with type ",
+ InnerType,nl})
+ end,
+
+ case InnerType of
+ {typefield,_} ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},"} = "});
+ {objectfield,_,_} ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},"} = "});
+ _ ->
+ asn1ct_name:new(term),
+ emit({"{",{curr,term},",",{next,bytes},"} = "})
+ end,
+
+ NewOptPos =
+ case {Ext,Prop} of
+ {noext,mandatory} -> OptPos; % generate nothing
+ {noext,_} ->
+ Element = io_lib:format("Opt band (1 bsl ~w)",[NumberOfOptionals - OptPos]),
+ emit({"case ",Element," of",nl}),
+ emit({"_Opt",OptPos," when _Opt",OptPos," > 0 ->"}),
+ OptPos+1;
+ _ ->
+ emit(["case Extensions of",nl]),
+ emit(["_ when size(Extensions) >= ",Pos,",element(",Pos,",Extensions) == 1 ->",nl])
+ end,
+ put(component_type,{true,C}),
+ {TermVar,BytesVar} = gen_dec_line(TopType,Cname,Type,Tpos,DecInfObj,Ext),
+ erase(component_type),
+ case {Ext,Prop} of
+ {noext,mandatory} -> true; % generate nothing
+ {noext,_} ->
+ emit([";",nl,"0 ->"]),
+ gen_dec_component_no_val(TopType,Cname,Type,Prop,Tpos,Ext);
+ _ ->
+ emit([";",nl,"_ ->",nl]),
+ gen_dec_component_no_val(TopType,Cname,Type,Prop,Tpos,Ext)
+ end,
+ case {Ext,Prop} of
+ {noext,mandatory} -> true; % generate nothing
+ {noext,_} ->
+ emit([nl,"end"]);
+ _ ->
+ emit([nl,"end"])
+
+ end,
+ asn1ct_name:new(bytes),
+ case Rest of
+ [] ->
+ {Pos+1,AccTerm++TermVar,AccBytes++BytesVar};
+ _ ->
+ emit({com,nl}),
+ gen_dec_components_call1(TopType,Rest,Tpos+1,NewOptPos,"",DecInfObj,Ext,
+ AccTerm++TermVar,AccBytes++BytesVar,NumberOfOptionals)
+ end;
+
+gen_dec_components_call1(_TopType,[],Pos,_OptPos,_,_,_,AccTerm,AccBytes,_NumberOfOptionals) ->
+ {Pos,AccTerm,AccBytes}.
+
+
+%%gen_dec_component_no_val(TopType,Cname,Type,_,Pos,{ext,Ep,Enum}) when Pos >= Ep ->
+%% emit({"{asn1_NOEXTVALUE,",{curr,bytes},"}",nl});
+gen_dec_component_no_val(_,_,_,{'DEFAULT',DefVal},_,_) ->
+ emit(["{",{asis,DefVal},",",{curr,bytes},"}",nl]);
+gen_dec_component_no_val(_,_,_,'OPTIONAL',_,_) ->
+ emit({"{asn1_NOVALUE,",{curr,bytes},"}",nl});
+gen_dec_component_no_val(_,_,_,mandatory,_,{ext,_,_}) ->
+ emit({"{asn1_NOVALUE,",{curr,bytes},"}",nl}).
+
+
+gen_dec_line(TopType,Cname,Type,Pos,DecInfObj,Ext) ->
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ Atype =
+ case Type of
+ #type{def=#'ObjectClassFieldType'{type=InnerType}} ->
+ InnerType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ BytesVar0 = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ BytesVar = case Ext of
+ {ext,Ep,_} when Pos >= Ep ->
+ emit(["begin",nl,"{TmpVal",Pos,",Trem",Pos,
+ "}=?RT_PER:decode_open_type(",
+ {curr,bytes},",[]),",nl,
+ "{TmpValx",Pos,",_}="]),
+ io_lib:format("TmpVal~p",[Pos]);
+ _ -> BytesVar0
+ end,
+ SaveBytes =
+ case Atype of
+ {typefield,_} ->
+ case DecInfObj of
+ false -> % This is in a choice with typefield components
+ {Name,RestFieldNames} =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+ emit([indent(2),"{",{curr,tmpterm},", ",{next,bytes},
+ "} = ?RT_PER:decode_open_type(",{curr,bytes},
+ ", []),",nl]),
+ emit([indent(2),"case (catch ObjFun(",
+ {asis,Name},
+ ",",{curr,tmpterm},",telltype,",
+ {asis,RestFieldNames},")) of", nl]),
+ emit([indent(4),"{'EXIT',",{curr,reason},"} ->",nl]),
+ emit([indent(6),"exit({'Type not ",
+ "compatible with table constraint', ",
+ {curr,reason},"});",nl]),
+ asn1ct_name:new(tmpterm),
+ emit([indent(4),"{",{curr,tmpterm},", _} ->",nl]),
+ emit([indent(6),"{",Cname,", {",{curr,tmpterm},", ",
+ {next,bytes},"}}",nl]),
+ emit([indent(2),"end"]),
+ [];
+ {"got objfun through args","ObjFun"} ->
+			%% this is when the generated code gets the
+			%% objfun through arguments on function
+			%% invocation.
+ {Name,RestFieldNames} =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ emit(["?RT_PER:decode_open_type(",{curr,bytes},
+ ", []),",nl]),
+ emit([{curr,term}," =",nl,
+ " case (catch ObjFun(",{asis,Name},",",
+ {curr,tmpterm},",telltype,",
+ {asis,RestFieldNames},")) of", nl]),
+ emit([" {'EXIT',",{curr,reason},"} ->",nl]),
+ emit([indent(6),"exit({'Type not ",
+ "compatible with table constraint', ",
+ {curr,reason},"});",nl]),
+ asn1ct_name:new(tmpterm),
+ emit([indent(4),"{",{curr,tmpterm},", _} ->",nl]),
+ emit([indent(6),{curr,tmpterm},nl]),
+ emit([indent(2),"end"]),
+ [];
+ _ ->
+ emit({"?RT_PER:decode_open_type(",{curr,bytes},
+ ", [])"}),
+ RefedFieldName =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ [{Cname,RefedFieldName,
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ get_components_prop()}]
+ end;
+ {objectfield,PrimFieldName1,PFNList} ->
+ emit({"?RT_PER:decode_open_type(",{curr,bytes},", [])"}),
+ [{Cname,{PrimFieldName1,PFNList},
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ get_components_prop()}];
+ _ ->
+ CurrMod = get(currmod),
+ case asn1ct_gen:type(Atype) of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ emit({"'dec_",EType,"'(",BytesVar,",telltype)"});
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ emit({"'",Mod,"':'dec_",EType,"'(",BytesVar,
+ ",telltype)"});
+ {primitive,bif} ->
+ case Atype of
+ {fixedtypevaluefield,_,Btype} ->
+ Ctgenmod:gen_dec_prim(per,Btype,
+ BytesVar);
+ _ ->
+ Ctgenmod:gen_dec_prim(per,Type,
+ BytesVar)
+ end;
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OpenType} ->
+ Ctgenmod:gen_dec_prim(per,#type{def=OpenType},
+ BytesVar);
+ _ ->
+ Ctgenmod:gen_dec_prim(per,Type,
+ BytesVar)
+ end;
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(",BytesVar,",telltype)"});
+ {notype,_} ->
+ emit({"'dec_",Atype,"'(",BytesVar,",telltype)"});
+ {constructed,bif} ->
+ NewTypename = [Cname|TopType],
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ emit({"'dec_",asn1ct_gen:list2name(NewTypename),
+ "'(",BytesVar,", telltype, ObjFun)"});
+ _ ->
+ emit({"'dec_",asn1ct_gen:list2name(NewTypename),
+ "'(",BytesVar,", telltype)"})
+ end
+ end,
+ case DecInfObj of
+ {Cname,{_,OSet,UniqueFName,ValIndex}} ->
+ Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ ValueMatch = value_match(ValIndex,Term),
+ emit({",",nl,"ObjFun = 'getdec_",OSet,"'(",
+ {asis,UniqueFName},", ",ValueMatch,")"});
+ _ ->
+ ok
+ end,
+ []
+ end,
+ case Ext of
+ {ext,Ep2,_} when Pos >= Ep2 ->
+ emit([", {TmpValx",Pos,",Trem",Pos,"}",nl,"end"]);
+ _ -> true
+ end,
+ %% Prepare return value
+ case DecInfObj of
+ {Cname,ObjSet} ->
+ {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
+ SaveBytes};
+ _ ->
+ {[],SaveBytes}
+ end.
+
+gen_enc_choice(TopType,CompList,Ext) ->
+ gen_enc_choice_tag(CompList, [], Ext),
+ emit({com,nl}),
+ emit({"case element(1,Val) of",nl}),
+ gen_enc_choice2(TopType, CompList, Ext),
+ emit({nl,"end"}).
+
+gen_enc_choice_tag({C1,C2},_,_) ->
+ N1 = get_name_list(C1),
+ N2 = get_name_list(C2),
+ emit(["?RT_PER:set_choice(element(1,Val),",
+ {asis,{N1,N2}},", ",{asis,{length(N1),length(N2)}},")"]);
+gen_enc_choice_tag(C,_,_) ->
+ N = get_name_list(C),
+ emit(["?RT_PER:set_choice(element(1,Val),",
+ {asis,N},", ",{asis,length(N)},")"]).
+
+get_name_list(L) ->
+ get_name_list(L,[]).
+
+get_name_list([#'ComponentType'{name=Name}|T], Acc) ->
+ get_name_list(T,[Name|Acc]);
+get_name_list([], Acc) ->
+ lists:reverse(Acc).
+
+%gen_enc_choice_tag([H|T],Acc,Ext) when record(H,'ComponentType') ->
+% gen_enc_choice_tag(T,[H#'ComponentType'.name|Acc],Ext);
+%gen_enc_choice_tag([H|T],Acc,Ext) -> % skip EXTENSIONMARK
+% gen_enc_choice_tag(T,Acc,Ext);
+%gen_enc_choice_tag([],Acc,Ext) ->
+% Length = length(Acc),
+% emit({"?RT_PER:set_choice(element(1,Val),",{asis,Length},",",
+% {asis,lists:reverse(Acc)},",",{asis,Ext},")"}),
+% Length.
+
+gen_enc_choice2(TopType, {L1,L2}, Ext) ->
+ gen_enc_choice2(TopType, L1 ++ L2, 0, Ext);
+gen_enc_choice2(TopType, L, Ext) ->
+ gen_enc_choice2(TopType, L, 0, Ext).
+
+gen_enc_choice2(TopType,[H1,H2|T], Pos, Ext)
+when record(H1,'ComponentType'), record(H2,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ EncObj =
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% false;
+% _ ->
+% {no_attr,"ObjFun"}
+% end,
+ case asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation) of
+ no -> false;
+ _ -> {no_attr,"ObjFun"}
+ end,
+ emit({{asis,Cname}," ->",nl}),
+ gen_enc_line(TopType,Cname,Type,"element(2,Val)", Pos+1,EncObj,Ext),
+ emit({";",nl}),
+ gen_enc_choice2(TopType,[H2|T], Pos+1, Ext);
+gen_enc_choice2(TopType,[H1|T], Pos, Ext) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ EncObj =
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% false;
+% _ ->
+% {no_attr,"ObjFun"}
+% end,
+ case asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation) of
+ no -> false;
+ _ -> {no_attr,"ObjFun"}
+ end,
+ emit({{asis,H1#'ComponentType'.name}," ->",nl}),
+ gen_enc_line(TopType,Cname,Type,"element(2,Val)", Pos+1,EncObj,Ext),
+ gen_enc_choice2(TopType,T, Pos+1, Ext);
+gen_enc_choice2(_,[], _, _) ->
+ true.
+
+gen_dec_choice(TopType,CompList,{ext,Pos,NumExt}) ->
+ emit({"{Ext,",{curr,bytes},"} = ?RT_PER:getbit(Bytes),",nl}),
+ asn1ct_name:new(bytes),
+ gen_dec_choice1(TopType,CompList,{ext,Pos,NumExt});
+gen_dec_choice(TopType,CompList,noext) ->
+ gen_dec_choice1(TopType,CompList,noext).
+
+gen_dec_choice1(TopType,CompList,noext) ->
+ emit({"{Choice,",{curr,bytes},
+ "} = ?RT_PER:getchoice(",{prev,bytes},",",
+ length(CompList),", 0),",nl}),
+ emit({"{Cname,{Val,NewBytes}} = case Choice of",nl}),
+ gen_dec_choice2(TopType,CompList,noext),
+ emit({nl,"end,",nl}),
+ emit({nl,"{{Cname,Val},NewBytes}"});
+gen_dec_choice1(TopType,{RootList,ExtList},Ext) ->
+ NewList = RootList ++ ExtList,
+ gen_dec_choice1(TopType, NewList, Ext);
+gen_dec_choice1(TopType,CompList,{ext,ExtPos,ExtNum}) ->
+ emit({"{Choice,",{curr,bytes},
+ "} = ?RT_PER:getchoice(",{prev,bytes},",",
+ length(CompList)-ExtNum,",Ext ),",nl}),
+ emit({"{Cname,{Val,NewBytes}} = case Choice + Ext*",ExtPos-1," of",nl}),
+ gen_dec_choice2(TopType,CompList,{ext,ExtPos,ExtNum}),
+ emit([";",nl,"_ -> {asn1_ExtAlt, ?RT_PER:decode_open_type(",{curr,bytes},",[])}"]),
+ emit({nl,"end,",nl}),
+ emit({nl,"{{Cname,Val},NewBytes}"}).
+
+
+gen_dec_choice2(TopType,L,Ext) ->
+ gen_dec_choice2(TopType,L,0,Ext).
+
+gen_dec_choice2(TopType,[H1,H2|T],Pos,Ext)
+when record(H1,'ComponentType'), record(H2,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ case Type#type.def of
+ #'ObjectClassFieldType'{type={typefield,_}} ->
+ emit({Pos," -> ",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext),
+ emit({";",nl});
+ _ ->
+ emit({Pos," -> {",{asis,Cname},",",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext),
+ emit({"};",nl})
+ end,
+ gen_dec_choice2(TopType,[H2|T],Pos+1,Ext);
+gen_dec_choice2(TopType,[H1,_H2|T],Pos,Ext) when record(H1,'ComponentType') ->
+ gen_dec_choice2(TopType,[H1|T],Pos,Ext); % skip extensionmark
+gen_dec_choice2(TopType,[H1|T],Pos,Ext) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ case Type#type.def of
+ #'ObjectClassFieldType'{type={typefield,_}} ->
+ emit({Pos," -> ",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext);
+ _ ->
+ emit({Pos," -> {",{asis,Cname},",",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext),
+ emit("}")
+ end,
+ gen_dec_choice2(TopType,[T],Pos+1);
+gen_dec_choice2(TopType,[_|T],Pos,Ext) ->
+ gen_dec_choice2(TopType,T,Pos,Ext);% skip extensionmark
+gen_dec_choice2(_,[],Pos,_) ->
+ Pos.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+gen_encode_prim_wrapper(CtgenMod,Erule,Cont,DoTag,Value) ->
+% put(component_type,true), % add more info in component_type
+ CtgenMod:gen_encode_prim(Erule,Cont,DoTag,Value).
+% erase(component_type).
+
+make_element(I,Val,Cname) ->
+ case lists:member(optimize,get(encoding_options)) of
+ false ->
+ io_lib:format("?RT_PER:cindex(~w,~s,~w)",[I,Val,Cname]);
+ _ ->
+ io_lib:format("element(~w,~s)",[I,Val])
+ end.
+
+wrap_gen_dec_line(C,TopType,Cname,Type,Pos,DIO,Ext) ->
+ put(component_type,{true,C}),
+ gen_dec_line(TopType,Cname,Type,Pos,DIO,Ext),
+ erase(component_type).
+
+get_components_prop() ->
+ case get(component_type) of
+ undefined ->
+ mandatory;
+ {true,#'ComponentType'{prop=Prop}} -> Prop
+ end.
+
+
+value_match(Index,Value) when atom(Value) ->
+ value_match(Index,atom_to_list(Value));
+value_match([],Value) ->
+ Value;
+value_match([{VI,_}|VIs],Value) ->
+ value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
+value_match1(Value,[],Acc,Depth) ->
+ Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
+value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
+ value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
+
+notice_value_match() ->
+ Module = get(currmod),
+ put(value_match,{true,Module}).
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen.erl
new file mode 100644
index 0000000000..e4a0b1fd9a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen.erl
@@ -0,0 +1,1664 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_gen).
+
+-include("asn1_records.hrl").
+%%-compile(export_all).
+-export([pgen_exports/3,
+ pgen_hrl/4,
+ gen_head/3,
+ demit/1,
+ emit/1,
+ fopen/2,
+ get_inner/1,type/1,def_to_tag/1,prim_bif/1,
+ type_from_object/1,
+ get_typefromobject/1,get_fieldcategory/2,
+ get_classfieldcategory/2,
+ list2name/1,
+ list2rname/1,
+ constructed_suffix/2,
+ unify_if_string/1,
+ gen_check_call/7,
+ get_constraint/2,
+ insert_once/2,
+ rt2ct_suffix/1,rt2ct_suffix/0]).
+-export([pgen/4,pgen_module/5,mk_var/1, un_hyphen_var/1]).
+-export([gen_encode_constructed/4,gen_decode_constructed/4]).
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generates the Erlang module (.erl) and header (.hrl) file corresponding to an ASN.1 module
+%% The .hrl file is only generated if necessary
+%% Erules = per | ber | ber_bin | per_bin | ber_bin_v2
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList,PTypeList,ClassList,ObjectList,ObjectSetList}
+%% TypeList = ValueList = [atom()]
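+%%
+%% Example call (placeholder names, for illustration only):
+%%   pgen("MyProto", per, 'MyProto', {['T1','T2'],['v1'],[],[],[],[]})
+%% writes MyProto.erl, and MyProto.hrl when records or macros are needed.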
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+pgen_module(OutFile,Erules,Module,TypeOrVal,Indent) ->
+ put(outfile,OutFile),
+ HrlGenerated = asn1ct_gen:pgen_hrl(Erules,Module,TypeOrVal,Indent),
+ asn1ct_name:start(),
+ ErlFile = lists:concat([OutFile,".erl"]),
+ Fid = asn1ct_gen:fopen(ErlFile,write),
+ put(gen_file_out,Fid),
+ asn1ct_gen:gen_head(Erules,Module,HrlGenerated),
+ pgen_exports(Erules,Module,TypeOrVal),
+ pgen_dispatcher(Erules,Module,TypeOrVal),
+ pgen_info(Erules,Module),
+ pgen_typeorval(wrap_ber(Erules),Module,TypeOrVal),
+ pgen_partial_incomplete_decode(Erules),
+% gen_vars(asn1_db:mod_to_vars(Module)),
+% gen_tag_table(AllTypes),
+ file:close(Fid),
+ io:format("--~p--~n",[{generated,ErlFile}]).
+
+
+pgen_typeorval(Erules,Module,{Types,Values,_Ptypes,_Classes,Objects,ObjectSets}) ->
+ pgen_types(Erules,Module,Types),
+ pgen_values(Erules,Module,Values),
+ pgen_objects(Erules,Module,Objects),
+ pgen_objectsets(Erules,Module,ObjectSets),
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ pgen_check_defaultval(Erules,Module);
+ _ -> ok
+ end,
+ pgen_partial_decode(Erules,Module).
+
+pgen_values(_,_,[]) ->
+ true;
+pgen_values(Erules,Module,[H|T]) ->
+ Valuedef = asn1_db:dbget(Module,H),
+ gen_value(Valuedef),
+ pgen_values(Erules,Module,T).
+
+pgen_types(_,Module,[]) ->
+ gen_value_match(Module),
+ true;
+pgen_types(Erules,Module,[H|T]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Typedef = asn1_db:dbget(Module,H),
+ Rtmod:gen_encode(Erules,Typedef),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,Typedef),
+ pgen_types(Erules,Module,T).
+
+pgen_objects(_,_,[]) ->
+ true;
+pgen_objects(Erules,Module,[H|T]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Typedef = asn1_db:dbget(Module,H),
+ Rtmod:gen_obj_code(Erules,Module,Typedef),
+ pgen_objects(Erules,Module,T).
+
+pgen_objectsets(_,_,[]) ->
+ true;
+pgen_objectsets(Erules,Module,[H|T]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ TypeDef = asn1_db:dbget(Module,H),
+ Rtmod:gen_objectset_code(Erules,TypeDef),
+ pgen_objectsets(Erules,Module,T).
+
+pgen_check_defaultval(Erules,Module) ->
+ CheckObjects = ets:tab2list(check_functions),
+ case get(asndebug) of
+ true ->
+ FileName = lists:concat([Module,'.table']),
+ {ok,IoDevice} = file:open(FileName,[write]),
+ Fun =
+ fun(X)->
+ io:format(IoDevice,"~n~n************~n~n~p~n~n*****"
+ "********~n~n",[X])
+ end,
+ lists:foreach(Fun,CheckObjects),
+ file:close(IoDevice);
+ _ -> ok
+ end,
+ gen_check_defaultval(Erules,Module,CheckObjects).
+
+pgen_partial_decode(Erules,Module) ->
+ pgen_partial_inc_dec(Erules,Module),
+ pgen_partial_dec(Erules,Module).
+
+pgen_partial_inc_dec(Erules,Module) ->
+% io:format("Start partial incomplete decode gen?~n"),
+ case asn1ct:get_gen_state_field(inc_type_pattern) of
+ undefined ->
+%	    io:format("Partial incomplete decode gen not started: ~w~n",[asn1ct:get_gen_state_field(active)]),
+ ok;
+% [] ->
+% ok;
+ ConfList ->
+ PatternLists=lists:map(fun({_,P}) -> P end,ConfList),
+ pgen_partial_inc_dec1(Erules,Module,PatternLists),
+ gen_partial_inc_dec_refed_funcs(Erules)
+ end.
+
+%% pgen_partial_inc_dec1 generates a decode function for the top type of
+%% each of the partially (incompletely) decoded types.
+pgen_partial_inc_dec1(Erules,Module,[P|Ps]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ TopTypeName = asn1ct:partial_inc_dec_toptype(P),
+ TypeDef=asn1_db:dbget(Module,TopTypeName),
+ asn1ct_name:clear(),
+ asn1ct:update_gen_state(namelist,P),
+ asn1ct:update_gen_state(active,true),
+ asn1ct:update_gen_state(prefix,"dec-inc-"),
+ Rtmod:gen_decode(Erules,TypeDef),
+%% asn1ct:update_gen_state(namelist,tl(P)), %%
+ gen_dec_part_inner_constr(Erules,TypeDef,[TopTypeName]),
+ pgen_partial_inc_dec1(Erules,Module,Ps);
+pgen_partial_inc_dec1(_,_,[]) ->
+ ok.
+
+gen_partial_inc_dec_refed_funcs(Erule) when Erule == ber_bin_v2 ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erule),
+ rt2ct_suffix(Erule)])),
+ case asn1ct:next_refed_func() of
+ [] ->
+ ok;
+ {#'Externaltypereference'{module=M,type=Name},Pattern} ->
+ TypeDef = asn1_db:dbget(M,Name),
+ asn1ct:update_gen_state(namelist,Pattern),
+ Rtmod:gen_inc_decode(Erule,TypeDef),
+ gen_dec_part_inner_constr(Erule,TypeDef,[Name]),
+ gen_partial_inc_dec_refed_funcs(Erule);
+ _ ->
+ gen_partial_inc_dec_refed_funcs(Erule)
+ end;
+gen_partial_inc_dec_refed_funcs(_) ->
+ ok.
+
+pgen_partial_dec(_Erules,_Module) ->
+ ok. %%%% implement later
+
+%% generate code for all inner types that are called from the top type
+%% of the partial incomplete decode
+gen_dec_part_inner_constr(Erules,TypeDef,TypeName) ->
+ Def = TypeDef#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case InnerType of
+ 'SET' ->
+ #'SET'{components=Components} = Def#type.def,
+ gen_dec_part_inner_types(Erules,Components,TypeName);
+ %% Continue generate the inner of each component
+ 'SEQUENCE' ->
+ #'SEQUENCE'{components=Components} = Def#type.def,
+ gen_dec_part_inner_types(Erules,Components,TypeName);
+ 'CHOICE' ->
+ {_,Components} = Def#type.def,
+ gen_dec_part_inner_types(Erules,Components,TypeName);
+ 'SEQUENCE OF' ->
+	    %% this and the next case must be the last component in the
+	    %% partial decode chain here. It is not likely that this occurs.
+ {_,Type} = Def#type.def,
+ NameSuffix = constructed_suffix(InnerType,Type#type.def),
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,[NameSuffix|TypeName],Type);
+%% gen_types(Erules,[NameSuffix|Typename],Type);
+ 'SET OF' ->
+ {_,Type} = Def#type.def,
+ NameSuffix = constructed_suffix(InnerType,Type#type.def),
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,[NameSuffix|TypeName],Type);
+ _ ->
+ ok
+ end.
+
+gen_dec_part_inner_types(Erules,[ComponentType|Rest],TypeName) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,TypeName,ComponentType),
+ gen_dec_part_inner_types(Erules,Rest,TypeName);
+gen_dec_part_inner_types(Erules,{Comps1,Comps2},TypeName)
+ when list(Comps1),list(Comps2) ->
+ gen_dec_part_inner_types(Erules,Comps1 ++ Comps2,TypeName);
+gen_dec_part_inner_types(_,[],_) ->
+ ok.
+
+
+pgen_partial_incomplete_decode(Erule) ->
+ case asn1ct:get_gen_state_field(active) of
+ true ->
+ pgen_partial_incomplete_decode1(Erule),
+ asn1ct:reset_gen_state();
+ _ ->
+ ok
+ end.
+pgen_partial_incomplete_decode1(ber_bin_v2) ->
+ case asn1ct:read_config_data(partial_incomplete_decode) of
+ undefined ->
+ ok;
+ Data ->
+ lists:foreach(fun emit_partial_incomplete_decode/1,Data)
+ end,
+ GeneratedFs= asn1ct:get_gen_state_field(gen_refed_funcs),
+% io:format("GeneratedFs :~n~p~n",[GeneratedFs]),
+ gen_part_decode_funcs(GeneratedFs,0);
+pgen_partial_incomplete_decode1(_) -> ok.
+
+emit_partial_incomplete_decode({FuncName,TopTypeName,Pattern}) ->
+ emit([{asis,FuncName},"(Bytes) ->",nl,
+ " decode_partial_incomplete(",{asis,TopTypeName},",Bytes,",{asis,Pattern},").",nl]);
+emit_partial_incomplete_decode(D) ->
+ throw({error,{asn1,{"bad data in asn1config file",D}}}).
+
+gen_part_decode_funcs([Data={Name,_,_,Type}|GeneratedFs],N) ->
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ get_inner(Type#type.def)
+ end,
+ WhatKind = type(InnerType),
+ TypeName=list2name(Name),
+ if
+ N > 0 -> emit([";",nl]);
+ true -> ok
+ end,
+ emit(["decode_inc_disp('",TypeName,"',Data) ->",nl]),
+ gen_part_decode_funcs(WhatKind,TypeName,Data),
+ gen_part_decode_funcs(GeneratedFs,N+1);
+gen_part_decode_funcs([_H|T],N) ->
+ gen_part_decode_funcs(T,N);
+gen_part_decode_funcs([],N) ->
+ if
+ N > 0 ->
+	    emit([".",nl]);
+ true ->
+ ok
+ end.
+
+gen_part_decode_funcs(#'Externaltypereference'{module=M,type=T},
+ _TypeName,Data) ->
+ #typedef{typespec=TS} = asn1_db:dbget(M,T),
+ InnerType =
+ case TS#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ get_inner(TS#type.def)
+ end,
+ WhatKind = type(InnerType),
+ gen_part_decode_funcs(WhatKind,[T],Data);
+gen_part_decode_funcs({constructed,bif},TypeName,
+ {_Name,parts,Tag,_Type}) ->
+ emit([" case Data of",nl,
+ " L when list(L) ->",nl,
+ " 'dec_",TypeName,"'(lists:map(fun(X)->element(1,?RT_BER:decode(X)) end,L),",{asis,Tag},");",nl,
+ " _ ->",nl,
+ " [Res] = 'dec_",TypeName,"'([Data],",{asis,Tag},"),",nl,
+ " Res",nl,
+ " end"]);
+gen_part_decode_funcs(WhatKind,_TypeName,{_Name,parts,_Tag,_Type}) ->
+ throw({error,{asn1,{"only SEQUENCE OF/SET OF may have the partial incomplete directive 'parts'.",WhatKind}}});
+gen_part_decode_funcs({constructed,bif},TypeName,
+ {_Name,undecoded,Tag,_Type}) ->
+ emit([" 'dec_",TypeName,"'(Data,",{asis,Tag},")"]);
+gen_part_decode_funcs({primitive,bif},_TypeName,
+ {_Name,undecoded,Tag,Type}) ->
+ % Argument no 6 is 0, i.e. bit 6 for primitive encoding.
+ asn1ct_gen_ber_bin_v2:gen_dec_prim(ber_bin_v2,Type,"Data",Tag,[],0,", mandatory, ");
+gen_part_decode_funcs(WhatKind,_TypeName,{_,Directive,_,_}) ->
+ throw({error,{asn1,{"Not implemented yet",WhatKind," partial incomplete directive:",Directive}}}).
+
+gen_types(Erules,Tname,{RootList,ExtList}) when list(RootList) ->
+ gen_types(Erules,Tname,RootList),
+ gen_types(Erules,Tname,ExtList);
+gen_types(Erules,Tname,[{'EXTENSIONMARK',_,_}|Rest]) ->
+ gen_types(Erules,Tname,Rest);
+gen_types(Erules,Tname,[ComponentType|Rest]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_encode(Erules,Tname,ComponentType),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,Tname,ComponentType),
+ gen_types(Erules,Tname,Rest);
+gen_types(_,_,[]) ->
+ true;
+gen_types(Erules,Tname,Type) when record(Type,type) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_encode(Erules,Tname,Type),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,Tname,Type).
+
+gen_value_match(Module) ->
+ case get(value_match) of
+ {true,Module} ->
+ emit(["value_match([{Index,Cname}|Rest],Value) ->",nl,
+ " Value2 =",nl,
+ " case element(Index,Value) of",nl,
+ " {Cname,Val2} -> Val2;",nl,
+ " X -> X",nl,
+ " end,",nl,
+ " value_match(Rest,Value2);",nl,
+ "value_match([],Value) ->",nl,
+ " Value.",nl]);
+ _ -> ok
+ end,
+ put(value_match,undefined).
+
+gen_check_defaultval(Erules,Module,[{Name,Type}|Rest]) ->
+ gen_check_func(Name,Type),
+ gen_check_defaultval(Erules,Module,Rest);
+gen_check_defaultval(_,_,[]) ->
+ ok.
+
+gen_check_func(Name,FType = #type{def=Def}) ->
+ emit({Name,"(V,asn1_DEFAULT) ->",nl," true;",nl}),
+ emit({Name,"(V,V) ->",nl," true;",nl}),
+ emit({Name,"(V,{_,V}) ->",nl," true;",nl}),
+ case Def of
+ {'SEQUENCE OF',Type} ->
+ gen_check_sof(Name,'SEQOF',Type);
+ {'SET OF',Type} ->
+ gen_check_sof(Name,'SETOF',Type);
+ #'SEQUENCE'{components=Components} ->
+ gen_check_sequence(Name,Components);
+ #'SET'{components=Components} ->
+ gen_check_sequence(Name,Components);
+ {'CHOICE',Components} ->
+ gen_check_choice(Name,Components);
+ #'Externaltypereference'{type=T} ->
+ emit({Name,"(DefaultValue,Value) ->",nl}),
+ emit({" ",list2name([T,check]),"(DefaultValue,Value).",nl});
+ MaybePrim ->
+ InnerType = get_inner(MaybePrim),
+ case type(InnerType) of
+ {primitive,bif} ->
+ emit({Name,"(DefaultValue,Value) ->",nl," "}),
+ gen_prim_check_call(InnerType,"DefaultValue","Value",
+ FType),
+ emit({".",nl,nl});
+ _ ->
+ throw({asn1_error,{unknown,type,MaybePrim}})
+ end
+ end.
+
+gen_check_sof(Name,SOF,Type) ->
+ NewName = list2name([sorted,Name]),
+ emit({Name,"(V1,V2) ->",nl}),
+ emit({" ",NewName,"(lists:sort(V1),lists:sort(V2)).",nl,nl}),
+ emit({NewName,"([],[]) ->",nl," true;",nl}),
+ emit({NewName,"([DV|DVs],[V|Vs]) ->",nl," "}),
+ InnerType = get_inner(Type#type.def),
+ case type(InnerType) of
+ {primitive,bif} ->
+ gen_prim_check_call(InnerType,"DV","V",Type),
+ emit({",",nl});
+ {constructed,bif} ->
+ emit({list2name([SOF,Name]),"(DV, V),",nl});
+ #'Externaltypereference'{type=T} ->
+ emit({list2name([T,check]),"(DV,V),",nl})
+ end,
+ emit({" ",NewName,"(DVs,Vs).",nl,nl}).
+
+gen_check_sequence(Name,Components) ->
+ emit({Name,"(DefaultValue,Value) ->",nl}),
+ gen_check_sequence(Name,Components,1).
+gen_check_sequence(Name,[#'ComponentType'{name=N,typespec=Type}|Cs],Num) ->
+ InnerType = get_inner(Type#type.def),
+% NthDefV = lists:concat(["lists:nth(",Num,",DefaultValue)"]),
+ NthDefV = ["element(",Num+1,",DefaultValue)"],
+% NthV = lists:concat(["lists:nth(",Num,",Value)"]),
+ NthV = ["element(",Num+1,",Value)"],
+ gen_check_func_call(Name,Type,InnerType,NthDefV,NthV,N),
+ case Cs of
+ [] ->
+ emit({".",nl,nl});
+ _ ->
+ emit({",",nl}),
+ gen_check_sequence(Name,Cs,Num+1)
+ end;
+gen_check_sequence(_,[],_) ->
+ ok.
+
+gen_check_choice(Name,CList=[#'ComponentType'{}|_Cs]) ->
+ emit({Name,"({Id,DefaultValue},{Id,Value}) ->",nl}),
+ emit({" case Id of",nl}),
+ gen_check_choice_components(Name,CList,1).
+
+gen_check_choice_components(_,[],_)->
+ ok;
+gen_check_choice_components(Name,[#'ComponentType'{name=N,typespec=Type}|
+ Cs],Num) ->
+ Ind6 = " ",
+ InnerType = get_inner(Type#type.def),
+% DefVal = ["element(2,lists:nth(",Num,",DefaultValue))"],
+ emit({Ind6,N," ->",nl,Ind6}),
+ gen_check_func_call(Name,Type,InnerType,{var,"defaultValue"},
+ {var,"value"},N),
+ case Cs of
+ [] ->
+ emit({nl," end.",nl,nl});
+ _ ->
+ emit({";",nl}),
+ gen_check_choice_components(Name,Cs,Num+1)
+ end.
+
+gen_check_func_call(Name,Type,InnerType,DefVal,Val,N) ->
+ case type(InnerType) of
+ {primitive,bif} ->
+ emit(" "),
+ gen_prim_check_call(InnerType,DefVal,Val,Type);
+ #'Externaltypereference'{type=T} ->
+ emit({" ",list2name([T,check]),"(",DefVal,",",Val,")"});
+ _ ->
+ emit({" ",list2name([N,Name]),"(",DefVal,",",Val,")"})
+ end.
+
+
+%% VARIOUS GENERATOR STUFF
+%% *************************************************
+%%**************************************************
+
+mk_var(X) when atom(X) ->
+ list_to_atom(mk_var(atom_to_list(X)));
+
+mk_var([H|T]) ->
+ [H-32|T].
+
+%% Since hyphens are allowed in ASN.1 names, they may occur in a
+%% variable too. Turn a hyphen into an underscore sign.
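+%% Example: un_hyphen_var('abc-def') returns 'abc_def', and
+%% un_hyphen_var("My-Type") returns "My_Type".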
+un_hyphen_var(X) when atom(X) ->
+ list_to_atom(un_hyphen_var(atom_to_list(X)));
+un_hyphen_var([45|T]) ->
+ [95|un_hyphen_var(T)];
+un_hyphen_var([H|T]) ->
+ [H|un_hyphen_var(T)];
+un_hyphen_var([]) ->
+ [].
+
+%% Generate value functions ***************
+%% ****************************************
+%% Generates a function 'V'/0 for each value V defined in the ASN.1 module.
+%% The function returns the value in an Erlang representation which can be
+%% used as input to the runtime encode functions.
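+%% Example: a value definition such as  two INTEGER ::= 2  results in the
+%% generated function
+%%   'two'() ->
+%%   2.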
+
+gen_value(Value) when record(Value,valuedef) ->
+%% io:format(" ~w ",[Value#valuedef.name]),
+ emit({"'",Value#valuedef.name,"'() ->",nl}),
+ V = Value#valuedef.value,
+ emit([{asis,V},".",nl,nl]).
+
+gen_encode_constructed(Erules,Typename,InnerType,D) when record(D,type) ->
+
+ Rtmod = list_to_atom(lists:concat(["asn1ct_constructed_",erule(Erules)])),
+ case InnerType of
+ 'SET' ->
+ Rtmod:gen_encode_set(Erules,Typename,D),
+ #'SET'{components=Components} = D#type.def,
+ gen_types(Erules,Typename,Components);
+ 'SEQUENCE' ->
+ Rtmod:gen_encode_sequence(Erules,Typename,D),
+ #'SEQUENCE'{components=Components} = D#type.def,
+ gen_types(Erules,Typename,Components);
+ 'CHOICE' ->
+ Rtmod:gen_encode_choice(Erules,Typename,D),
+ {_,Components} = D#type.def,
+ gen_types(Erules,Typename,Components);
+ 'SEQUENCE OF' ->
+ Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ gen_types(Erules,[NameSuffix|Typename],Type);
+ 'SET OF' ->
+ Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ gen_types(Erules,[NameSuffix|Typename],Type);
+ _ ->
+ exit({nyi,InnerType})
+ end;
+gen_encode_constructed(Erules,Typename,InnerType,D)
+ when record(D,typedef) ->
+ gen_encode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
+
+gen_decode_constructed(Erules,Typename,InnerType,D) when record(D,type) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_constructed_",erule(Erules)])),
+ asn1ct:step_in_constructed(), %% updates namelist for incomplete
+ %% partial decode
+ case InnerType of
+ 'SET' ->
+ Rtmod:gen_decode_set(Erules,Typename,D);
+ 'SEQUENCE' ->
+ Rtmod:gen_decode_sequence(Erules,Typename,D);
+ 'CHOICE' ->
+ Rtmod:gen_decode_choice(Erules,Typename,D);
+ 'SEQUENCE OF' ->
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ 'SET OF' ->
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ _ ->
+ exit({nyi,InnerType})
+ end;
+
+
+gen_decode_constructed(Erules,Typename,InnerType,D) when record(D,typedef) ->
+ gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
+
+
+pgen_exports(Erules,_Module,{Types,Values,_,_,Objects,ObjectSets}) ->
+ emit({"-export([encoding_rule/0]).",nl}),
+ case Types of
+ [] -> ok;
+ _ ->
+ emit({"-export([",nl}),
+ case Erules of
+ ber ->
+ gen_exports1(Types,"enc_",2);
+ ber_bin ->
+ gen_exports1(Types,"enc_",2);
+ ber_bin_v2 ->
+ gen_exports1(Types,"enc_",2);
+ _ ->
+ gen_exports1(Types,"enc_",1)
+ end,
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",2),
+ case Erules of
+ ber ->
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",3);
+ ber_bin ->
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",3);
+ ber_bin_v2 ->
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",2);
+ _ -> ok
+ end
+ end,
+ case Values of
+ [] -> ok;
+ _ ->
+ emit({"-export([",nl}),
+ gen_exports1(Values,"",0)
+ end,
+ case Objects of
+ [] -> ok;
+ _ ->
+ case erule(Erules) of
+ per ->
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"enc_",3),
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"dec_",4);
+ ber_bin_v2 ->
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"enc_",3),
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"dec_",3);
+ _ ->
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"enc_",4),
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"dec_",4)
+ end
+ end,
+ case ObjectSets of
+ [] -> ok;
+ _ ->
+ emit({"-export([",nl}),
+ gen_exports1(ObjectSets,"getenc_",2),
+ emit({"-export([",nl}),
+ gen_exports1(ObjectSets,"getdec_",2)
+ end,
+ emit({"-export([info/0]).",nl}),
+ gen_partial_inc_decode_exports(),
+ emit({nl,nl}).
+
+gen_exports1([F1,F2|T],Prefix,Arity) ->
+ emit({"'",Prefix,F1,"'/",Arity,com,nl}),
+ gen_exports1([F2|T],Prefix,Arity);
+gen_exports1([Flast|_T],Prefix,Arity) ->
+ emit({"'",Prefix,Flast,"'/",Arity,nl,"]).",nl,nl}).
+
+gen_partial_inc_decode_exports() ->
+ case {asn1ct:read_config_data(partial_incomplete_decode),
+ asn1ct:get_gen_state_field(inc_type_pattern)} of
+ {undefined,_} ->
+ ok;
+ {_,undefined} ->
+ ok;
+ {Data,_} ->
+ gen_partial_inc_decode_exports(Data),
+ emit("-export([decode_part/2]).")
+ end.
+gen_partial_inc_decode_exports([]) ->
+ ok;
+gen_partial_inc_decode_exports([{Name,_,_}|Rest]) ->
+ emit(["-export([",Name,"/1"]),
+ gen_partial_inc_decode_exports1(Rest);
+gen_partial_inc_decode_exports([_|Rest]) ->
+ gen_partial_inc_decode_exports(Rest).
+
+gen_partial_inc_decode_exports1([]) ->
+ emit(["]).",nl]);
+gen_partial_inc_decode_exports1([{Name,_,_}|Rest]) ->
+ emit([", ",Name,"/1"]),
+ gen_partial_inc_decode_exports1(Rest);
+gen_partial_inc_decode_exports1([_|Rest]) ->
+ gen_partial_inc_decode_exports1(Rest).
+
+pgen_dispatcher(Erules,_Module,{[],_Values,_,_,_Objects,_ObjectSets}) ->
+ emit(["encoding_rule() ->",nl]),
+ emit([{asis,Erules},".",nl,nl]);
+pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
+ emit(["-export([encode/2,decode/2,encode_disp/2,decode_disp/2]).",nl,nl]),
+ emit(["encoding_rule() ->",nl]),
+ emit([" ",{asis,Erules},".",nl,nl]),
+ Call = case Erules of
+ per -> "?RT_PER:complete(encode_disp(Type,Data))";
+ per_bin -> "?RT_PER:complete(encode_disp(Type,Data))";
+ ber -> "encode_disp(Type,Data)";
+ ber_bin -> "encode_disp(Type,Data)";
+ ber_bin_v2 -> "encode_disp(Type,Data)"
+ end,
+ EncWrap = case Erules of
+ ber -> "wrap_encode(Bytes)";
+ _ -> "Bytes"
+ end,
+ emit(["encode(Type,Data) ->",nl,
+ "case catch ",Call," of",nl,
+ " {'EXIT',{error,Reason}} ->",nl,
+ " {error,Reason};",nl,
+ " {'EXIT',Reason} ->",nl,
+ " {error,{asn1,Reason}};",nl,
+ " {Bytes,_Len} ->",nl,
+ " {ok,",EncWrap,"};",nl,
+ " Bytes ->",nl,
+ " {ok,",EncWrap,"}",nl,
+ "end.",nl,nl]),
+
+ case Erules of
+ ber_bin_v2 ->
+ emit(["decode(Type,Data0) ->",nl]),
+ emit(["{Data,_RestBin} = ?RT_BER:decode(Data0",driver_parameter(),"),",nl]);
+ _ ->
+ emit(["decode(Type,Data) ->",nl])
+ end,
+ DecWrap = case Erules of
+ ber -> "wrap_decode(Data)";
+ _ -> "Data"
+ end,
+
+ emit(["case catch decode_disp(Type,",DecWrap,") of",nl,
+ " {'EXIT',{error,Reason}} ->",nl,
+ " {error,Reason};",nl,
+ " {'EXIT',Reason} ->",nl,
+ " {error,{asn1,Reason}};",nl]),
+ case Erules of
+ ber_bin_v2 ->
+ emit([" Result ->",nl,
+ " {ok,Result}",nl]);
+ _ ->
+ emit([" {X,_Rest} ->",nl,
+ " {ok,X};",nl,
+ " {X,_Rest,_Len} ->",nl,
+ " {ok,X}",nl])
+ end,
+ emit(["end.",nl,nl]),
+
+ gen_decode_partial_incomplete(Erules),
+
+ case Types of
+ [] -> ok;
+ _ ->
+ case Erules of
+ ber ->
+ gen_dispatcher(Types,"encode_disp","enc_",",[]"),
+ gen_dispatcher(Types,"decode_disp","dec_",",mandatory");
+ ber_bin ->
+ gen_dispatcher(Types,"encode_disp","enc_",",[]"),
+ gen_dispatcher(Types,"decode_disp","dec_",",mandatory");
+ ber_bin_v2 ->
+ gen_dispatcher(Types,"encode_disp","enc_",""),
+ gen_dispatcher(Types,"decode_disp","dec_",""),
+ gen_partial_inc_dispatcher();
+ _PerOrPer_bin ->
+ gen_dispatcher(Types,"encode_disp","enc_",""),
+ gen_dispatcher(Types,"decode_disp","dec_",",mandatory")
+ end,
+ emit([nl])
+ end,
+ case Erules of
+ ber ->
+ gen_wrapper();
+ _ -> ok
+ end,
+ emit({nl,nl}).
+
+
+gen_decode_partial_incomplete(Erule) when Erule == ber;Erule==ber_bin;
+ Erule==ber_bin_v2 ->
+ case {asn1ct:read_config_data(partial_incomplete_decode),
+ asn1ct:get_gen_state_field(inc_type_pattern)} of
+ {undefined,_} ->
+ ok;
+ {_,undefined} ->
+ ok;
+ _ ->
+ case Erule of
+ ber_bin_v2 ->
+ EmitCaseClauses =
+ fun() ->
+ emit([" {'EXIT',{error,Reason}} ->",nl,
+ " {error,Reason};",nl,
+ " {'EXIT',Reason} ->",nl,
+ " {error,{asn1,Reason}};",nl,
+ " Result ->",nl,
+ " {ok,Result}",nl,
+ " end.",nl,nl])
+ end,
+ emit(["decode_partial_incomplete(Type,Data0,",
+ "Pattern) ->",nl]),
+ emit([" {Data,_RestBin} =",nl,
+ " ?RT_BER:decode_primitive_",
+ "incomplete(Pattern,Data0),",nl,
+ " case catch decode_partial_inc_disp(Type,",
+ "Data) of",nl]),
+ EmitCaseClauses(),
+ emit(["decode_part(Type,Data0) ->",nl,
+ " {Data,_RestBin} = ?RT_BER:decode(Data0),",nl,
+ " case catch decode_inc_disp(Type,Data) of",nl]),
+ EmitCaseClauses();
+ _ -> ok % add later
+ end
+ end;
+gen_decode_partial_incomplete(_Erule) ->
+ ok.
+
+gen_partial_inc_dispatcher() ->
+ case {asn1ct:read_config_data(partial_incomplete_decode),
+ asn1ct:get_gen_state_field(inc_type_pattern)} of
+ {undefined,_} ->
+ ok;
+ {_,undefined} ->
+ ok;
+ {Data,_} ->
+ gen_partial_inc_dispatcher(Data)
+ end.
+gen_partial_inc_dispatcher([{_FuncName,TopType,_Pattern}|Rest]) ->
+ emit(["decode_partial_inc_disp(",{asis,TopType},",Data) ->",nl,
+ " ",{asis,list_to_atom(lists:concat([dec,"-inc-",TopType]))},
+ "(Data);",nl]),
+ gen_partial_inc_dispatcher(Rest);
+gen_partial_inc_dispatcher([]) ->
+ emit(["decode_partial_inc_disp(Type,_Data) ->",nl,
+ " exit({error,{asn1,{undefined_type,Type}}}).",nl]).
+
+driver_parameter() ->
+ Options = get(encoding_options),
+ case lists:member(driver,Options) of
+ true ->
+ ",driver";
+ _ -> ""
+ end.
+
+gen_wrapper() ->
+ emit(["wrap_encode(Bytes) when list(Bytes) ->",nl,
+ " binary_to_list(list_to_binary(Bytes));",nl,
+ "wrap_encode(Bytes) when binary(Bytes) ->",nl,
+ " binary_to_list(Bytes);",nl,
+ "wrap_encode(Bytes) -> Bytes.",nl,nl]),
+ emit(["wrap_decode(Bytes) when list(Bytes) ->",nl,
+ " list_to_binary(Bytes);",nl,
+ "wrap_decode(Bytes) -> Bytes.",nl]).
+
+gen_dispatcher([F1,F2|T],FuncName,Prefix,ExtraArg) ->
+ emit([FuncName,"('",F1,"',Data) -> '",Prefix,F1,"'(Data",ExtraArg,")",";",nl]),
+ gen_dispatcher([F2|T],FuncName,Prefix,ExtraArg);
+gen_dispatcher([Flast|_T],FuncName,Prefix,ExtraArg) ->
+ emit([FuncName,"('",Flast,"',Data) -> '",Prefix,Flast,"'(Data",ExtraArg,")",";",nl]),
+ emit([FuncName,"(","Type",",_Data) -> exit({error,{asn1,{undefined_type,Type}}}).",nl,nl,nl]).
+
+pgen_info(_Erules,Module) ->
+ Options = get(encoding_options),
+ emit({"info() ->",nl,
+ " [{vsn,'",asn1ct:vsn(),"'},",
+ " {module,'",Module,"'},",
+ " {options,",io_lib:format("~p",[Options]),"}].",nl}).
+
+open_hrl(OutFile,Module) ->
+ File = lists:concat([OutFile,".hrl"]),
+ Fid = fopen(File,write),
+ put(gen_file_out,Fid),
+ gen_hrlhead(Module).
+
+%% EMIT functions ************************
+%% ***************************************
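+%%
+%% For illustration (placeholder names): a call such as
+%%   emit(["'enc_",Name,"'(",{curr,val},")",nl])
+%% writes something like 'enc_MyType'(Val) followed by a newline to the
+%% output file stored under the gen_file_out key in the process dictionary.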
+
+ % debug generation
+demit(Term) ->
+ case get(asndebug) of
+ true -> emit(Term);
+ _ ->true
+ end.
+
+ % always generation
+
+emit({external,_M,T}) ->
+ emit(T);
+
+emit({prev,Variable}) when atom(Variable) ->
+ emit({var,asn1ct_name:prev(Variable)});
+
+emit({next,Variable}) when atom(Variable) ->
+ emit({var,asn1ct_name:next(Variable)});
+
+emit({curr,Variable}) when atom(Variable) ->
+ emit({var,asn1ct_name:curr(Variable)});
+
+emit({var,Variable}) when atom(Variable) ->
+ [Head|V] = atom_to_list(Variable),
+ emit([Head-32|V]);
+
+emit({var,Variable}) ->
+ [Head|V] = Variable,
+ emit([Head-32|V]);
+
+emit({asis,What}) ->
+ format(get(gen_file_out),"~w",[What]);
+
+emit(nl) ->
+ nl(get(gen_file_out));
+
+emit(com) ->
+ emit(",");
+
+emit(tab) ->
+ put_chars(get(gen_file_out)," ");
+
+emit(What) when integer(What) ->
+ put_chars(get(gen_file_out),integer_to_list(What));
+
+emit(What) when list(What), integer(hd(What)) ->
+ put_chars(get(gen_file_out),What);
+
+emit(What) when atom(What) ->
+ put_chars(get(gen_file_out),atom_to_list(What));
+
+emit(What) when tuple(What) ->
+ emit_parts(tuple_to_list(What));
+
+emit(What) when list(What) ->
+ emit_parts(What);
+
+emit(X) ->
+ exit({'cant emit ',X}).
+
+emit_parts([]) -> true;
+emit_parts([H|T]) ->
+ emit(H),
+ emit_parts(T).
+
+format(undefined,X,Y) ->
+ io:format(X,Y);
+format(X,Y,Z) ->
+ io:format(X,Y,Z).
+
+nl(undefined) -> io:nl();
+nl(X) -> io:nl(X).
+
+put_chars(undefined,X) ->
+ io:put_chars(X);
+put_chars(Y,X) ->
+ io:put_chars(Y,X).
+
+fopen(F, Mode) ->
+ case file:open(F, [Mode]) of
+ {ok, Fd} ->
+ Fd;
+ {error, Reason} ->
+ io:format("** Can't open file ~p ~n", [F]),
+ exit({error,Reason})
+ end.
+
+pgen_hrl(Erules,Module,TypeOrVal,_Indent) ->
+ put(currmod,Module),
+ {Types,Values,Ptypes,_,_,_} = TypeOrVal,
+ Ret =
+ case pgen_hrltypes(Erules,Module,Ptypes++Types,0) of
+ 0 ->
+ case Values of
+ [] ->
+ 0;
+ _ ->
+ open_hrl(get(outfile),get(currmod)),
+ pgen_macros(Erules,Module,Values),
+ 1
+ end;
+ X ->
+ pgen_macros(Erules,Module,Values),
+ X
+ end,
+ case Ret of
+ 0 ->
+ 0;
+ Y ->
+ Fid = get(gen_file_out),
+ file:close(Fid),
+ io:format("--~p--~n",
+ [{generated,lists:concat([get(outfile),".hrl"])}]),
+ Y
+ end.
+
+pgen_macros(_,_,[]) ->
+ true;
+pgen_macros(Erules,Module,[H|T]) ->
+ Valuedef = asn1_db:dbget(Module,H),
+ gen_macro(Valuedef),
+ pgen_macros(Erules,Module,T).
+
+pgen_hrltypes(_,_,[],NumRecords) ->
+ NumRecords;
+pgen_hrltypes(Erules,Module,[H|T],NumRecords) ->
+% io:format("records = ~p~n",NumRecords),
+ Typedef = asn1_db:dbget(Module,H),
+ AddNumRecords = gen_record(Typedef,NumRecords),
+ pgen_hrltypes(Erules,Module,T,NumRecords+AddNumRecords).
+
+
+%% Generates a macro for value Value defined in the ASN.1 module
+gen_macro(Value) when record(Value,valuedef) ->
+ emit({"-define('",Value#valuedef.name,"', ",
+ {asis,Value#valuedef.value},").",nl}).
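+%% Example: a value definition such as  maxEntries INTEGER ::= 64  results
+%% in the macro  -define('maxEntries', 64).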
+
+%% Generate record functions **************
+%% Generates an Erlang record for each named and unnamed SEQUENCE and SET in the ASN.1
+%% module. If no SEQUENCE or SET is found, no .hrl file is generated.
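+%% Example: a definition such as
+%%   Person ::= SEQUENCE { name IA5String, age INTEGER OPTIONAL }
+%% results in a record definition like
+%%   -record('Person',{name, age = asn1_NOVALUE}).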
+
+
+gen_record(Tdef,NumRecords) when record(Tdef,typedef) ->
+ Name = [Tdef#typedef.name],
+ Type = Tdef#typedef.typespec,
+ gen_record(type,Name,Type,NumRecords);
+
+gen_record(Tdef,NumRecords) when record(Tdef,ptypedef) ->
+ Name = [Tdef#ptypedef.name],
+ Type = Tdef#ptypedef.typespec,
+ gen_record(ptype,Name,Type,NumRecords).
+
+gen_record(TorPtype,Name,[#'ComponentType'{name=Cname,typespec=Type}|T],Num) ->
+ Num2 = gen_record(TorPtype,[Cname|Name],Type,Num),
+ gen_record(TorPtype,Name,T,Num2);
+gen_record(TorPtype,Name,{Clist1,Clist2},Num) when list(Clist1), list(Clist2) ->
+ gen_record(TorPtype,Name,Clist1++Clist2,Num);
+gen_record(TorPtype,Name,[_|T],Num) -> % skip EXTENSIONMARK
+ gen_record(TorPtype,Name,T,Num);
+gen_record(_TorPtype,_Name,[],Num) ->
+ Num;
+
+gen_record(TorPtype,Name,Type,Num) when record(Type,type) ->
+ Def = Type#type.def,
+ Rec = case Def of
+ Seq when record(Seq,'SEQUENCE') ->
+ case Seq#'SEQUENCE'.pname of
+ false ->
+ {record,Seq#'SEQUENCE'.components};
+ _Pname when TorPtype == type ->
+ false;
+ _ ->
+ {record,Seq#'SEQUENCE'.components}
+ end;
+ Set when record(Set,'SET') ->
+ case Set#'SET'.pname of
+ false ->
+ {record,Set#'SET'.components};
+ _Pname when TorPtype == type ->
+ false;
+ _ ->
+ {record,Set#'SET'.components}
+ end;
+% {'SET',{_,_CompList}} ->
+% {record,_CompList};
+ {'CHOICE',_CompList} -> {inner,Def};
+ {'SEQUENCE OF',_CompList} -> {['SEQOF'|Name],Def};
+ {'SET OF',_CompList} -> {['SETOF'|Name],Def};
+ _ -> false
+ end,
+ case Rec of
+ false -> Num;
+ {record,CompList} ->
+ case Num of
+ 0 -> open_hrl(get(outfile),get(currmod));
+ _ -> true
+ end,
+ emit({"-record('",list2name(Name),"',{",nl}),
+ RootList = case CompList of
+ _ when list(CompList) ->
+ CompList;
+ {_Rl,_} -> _Rl
+ end,
+ gen_record2(Name,'SEQUENCE',RootList),
+ NewCompList =
+ case CompList of
+ {CompList1,[]} ->
+ emit({"}). % with extension mark",nl,nl}),
+ CompList1;
+ {Tr,ExtensionList2} ->
+ case Tr of
+ [] -> true;
+ _ -> emit({",",nl})
+ end,
+ emit({"%% with extensions",nl}),
+ gen_record2(Name, 'SEQUENCE', ExtensionList2,
+ "", ext),
+ emit({"}).",nl,nl}),
+ Tr ++ ExtensionList2;
+ _ ->
+ emit({"}).",nl,nl}),
+ CompList
+ end,
+ gen_record(TorPtype,Name,NewCompList,Num+1);
+ {inner,{'CHOICE', CompList}} ->
+ gen_record(TorPtype,Name,CompList,Num);
+ {NewName,{_, CompList}} ->
+ gen_record(TorPtype,NewName,CompList,Num)
+ end;
+gen_record(_,_,_,NumRecords) -> % skip CLASS etc for now.
+ NumRecords.
+
+gen_head(Erules,Mod,Hrl) ->
+ {Rtmac,Rtmod} = case Erules of
+ per ->
+ emit({"%% Generated by the Erlang ASN.1 PER-"
+ "compiler version:",asn1ct:vsn(),nl}),
+ {"RT_PER",?RT_PER};
+ ber ->
+ emit({"%% Generated by the Erlang ASN.1 BER-"
+ "compiler version:",asn1ct:vsn(),nl}),
+ {"RT_BER",?RT_BER_BIN};
+ per_bin ->
+ emit({"%% Generated by the Erlang ASN.1 BER-"
+ "compiler version, utilizing bit-syntax:",
+ asn1ct:vsn(),nl}),
+ %% temporary code to enable rt2ct optimization
+ Options = get(encoding_options),
+ case lists:member(optimize,Options) of
+ true -> {"RT_PER","asn1rt_per_bin_rt2ct"};
+ _ ->
+ {"RT_PER",?RT_PER_BIN}
+ end;
+ ber_bin ->
+ emit({"%% Generated by the Erlang ASN.1 BER-"
+ "compiler version, utilizing bit-syntax:",
+ asn1ct:vsn(),nl}),
+ {"RT_BER",?RT_BER_BIN};
+ ber_bin_v2 ->
+ emit({"%% Generated by the Erlang ASN.1 BER_V2-"
+ "compiler version, utilizing bit-syntax:",
+ asn1ct:vsn(),nl}),
+ {"RT_BER","asn1rt_ber_bin_v2"}
+ end,
+ emit({"%% Purpose: encoder and decoder to the types in mod ",Mod,nl,nl}),
+ emit({"-module('",Mod,"').",nl}),
+ put(currmod,Mod),
+ %emit({"-compile(export_all).",nl}),
+ case Hrl of
+ 0 -> true;
+ _ ->
+ emit({"-include(\"",Mod,".hrl\").",nl})
+ end,
+ emit(["-define('",Rtmac,"',",Rtmod,").",nl]).
+
+
+gen_hrlhead(Mod) ->
+ emit({"%% Generated by the Erlang ASN.1 compiler version:",asn1ct:vsn(),nl}),
+ emit({"%% Purpose: Erlang record definitions for each named and unnamed",nl}),
+ emit({"%% SEQUENCE and SET, and macro definitions for each value",nl}),
+ emit({"%% definition,in module ",Mod,nl,nl}),
+ emit({nl,nl}).
+
+gen_record2(Name,SeqOrSet,Comps) ->
+ gen_record2(Name,SeqOrSet,Comps,"",noext).
+
+gen_record2(_Name,_SeqOrSet,[],_Com,_Extension) ->
+ true;
+gen_record2(Name,SeqOrSet,[{'EXTENSIONMARK',_,_}|T],Com,Extension) ->
+ gen_record2(Name,SeqOrSet,T,Com,Extension);
+gen_record2(_Name,_SeqOrSet,[H],Com,Extension) ->
+ #'ComponentType'{name=Cname} = H,
+ emit(Com),
+ emit({asis,Cname}),
+ gen_record_default(H, Extension);
+gen_record2(Name,SeqOrSet,[H|T],Com, Extension) ->
+ #'ComponentType'{name=Cname} = H,
+ emit(Com),
+ emit({asis,Cname}),
+ gen_record_default(H, Extension),
+% emit(", "),
+ gen_record2(Name,SeqOrSet,T,", ", Extension).
+
+%gen_record_default(C, ext) ->
+% emit(" = asn1_NOEXTVALUE");
+gen_record_default(#'ComponentType'{prop='OPTIONAL'}, _)->
+ emit(" = asn1_NOVALUE");
+gen_record_default(#'ComponentType'{prop={'DEFAULT',_}}, _)->
+ emit(" = asn1_DEFAULT");
+gen_record_default(_, _) ->
+ true.
+
+gen_check_call(TopType,Cname,Type,InnerType,WhatKind,DefaultValue,Element) ->
+ case WhatKind of
+ {primitive,bif} ->
+ gen_prim_check_call(InnerType,DefaultValue,Element,Type);
+ #'Externaltypereference'{module=M,type=T} ->
+ %% generate function call
+ Name = list2name([T,check]),
+ emit({"'",Name,"'(",DefaultValue,", ",Element,")"}),
+ %% insert in ets table and do look ahead check
+ Typedef = asn1_db:dbget(M,T),
+ RefType = Typedef#typedef.typespec,
+ InType = asn1ct_gen:get_inner(RefType#type.def),
+ case insert_once(check_functions,{Name,RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+% case asn1ct_gen:type(InType) of
+% {constructed,bif} ->
+% lookahead_innertype([T],InType,RefType);
+% #'Externaltypereference'{type=TNew} ->
+% lookahead_innertype([TNew],InType,RefType);
+% _ ->
+% ok
+% end;
+ _ ->
+ ok
+ end;
+ {constructed,bif} ->
+ NameList = [Cname|TopType],
+ Name = list2name(NameList ++ [check]),
+ emit({"'",Name,"'(",DefaultValue,", ",Element,")"}),
+ ets:insert(check_functions,{Name,Type}),
+ %% Must look for check functions in InnerType,
+ %% that may be referenced or internal defined
+ %% constructed types not used elsewhere.
+ lookahead_innertype(NameList,InnerType,Type)
+ end.
+
+gen_prim_check_call(PrimType,DefaultValue,Element,Type) ->
+ case unify_if_string(PrimType) of
+ 'BOOLEAN' ->
+ emit({"asn1rt_check:check_bool(",DefaultValue,", ",
+ Element,")"});
+ 'INTEGER' ->
+ NNL =
+ case Type#type.def of
+ {_,NamedNumberList} -> NamedNumberList;
+ _ -> []
+ end,
+ emit({"asn1rt_check:check_int(",DefaultValue,", ",
+ Element,", ",{asis,NNL},")"});
+ 'BIT STRING' ->
+ {_,NBL} = Type#type.def,
+ emit({"asn1rt_check:check_bitstring(",DefaultValue,", ",
+ Element,", ",{asis,NBL},")"});
+ 'OCTET STRING' ->
+ emit({"asn1rt_check:check_octetstring(",DefaultValue,", ",
+ Element,")"});
+ 'NULL' ->
+ emit({"asn1rt_check:check_null(",DefaultValue,", ",
+ Element,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"asn1rt_check:check_objectidentifier(",DefaultValue,
+ ", ",Element,")"});
+ 'ObjectDescriptor' ->
+ emit({"asn1rt_check:check_objectdescriptor(",DefaultValue,
+ ", ",Element,")"});
+ 'REAL' ->
+ emit({"asn1rt_check:check_real(",DefaultValue,
+ ", ",Element,")"});
+ 'ENUMERATED' ->
+ {_,Enumerations} = Type#type.def,
+ emit({"asn1rt_check:check_enum(",DefaultValue,
+ ", ",Element,", ",{asis,Enumerations},")"});
+ restrictedstring ->
+ emit({"asn1rt_check:check_restrictedstring(",DefaultValue,
+ ", ",Element,")"})
+ end.
+
+%% lookahead_innertype/3 traverses Type and checks if check functions
+%% have to be generated, i.e. for all constructed or referenced types.
+lookahead_innertype(Name,'SEQUENCE',Type) ->
+ Components = (Type#type.def)#'SEQUENCE'.components,
+ lookahead_components(Name,Components);
+lookahead_innertype(Name,'SET',Type) ->
+ Components = (Type#type.def)#'SET'.components,
+ lookahead_components(Name,Components);
+lookahead_innertype(Name,'CHOICE',Type) ->
+ {_,Components} = Type#type.def,
+ lookahead_components(Name,Components);
+lookahead_innertype(Name,'SEQUENCE OF',SeqOf) ->
+ lookahead_sof(Name,'SEQOF',SeqOf);
+lookahead_innertype(Name,'SET OF',SeqOf) ->
+ lookahead_sof(Name,'SETOF',SeqOf);
+lookahead_innertype(_Name,#'Externaltypereference'{module=M,type=T},_) ->
+ Typedef = asn1_db:dbget(M,T),
+ RefType = Typedef#typedef.typespec,
+ InType = asn1ct_gen:get_inner(RefType#type.def),
+ case type(InType) of
+ {constructed,bif} ->
+ NewName = list2name([T,check]),
+ case insert_once(check_functions,{NewName,RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+ _ ->
+ ok
+ end;
+ #'Externaltypereference'{} ->
+ NewName = list2name([T,check]),
+ case insert_once(check_functions,{NewName,RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end;
+% case insert_once(check_functions,{list2name(Name++[check]),Type}) of
+% true ->
+% InnerType = asn1ct_gen:get_inner(Type#type.def),
+% case asn1ct_gen:type(InnerType) of
+% {constructed,bif} ->
+% lookahead_innertype([T],InnerType,Type);
+% #'Externaltypereference'{type=TNew} ->
+% lookahead_innertype([TNew],InnerType,Type);
+% _ ->
+% ok
+% end;
+% _ ->
+% ok
+% end;
+lookahead_innertype(_,_,_) ->
+ ok.
+
+lookahead_components(_,[]) -> ok;
+lookahead_components(Name,[C|Cs]) ->
+ #'ComponentType'{name=Cname,typespec=Type} = C,
+ InType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InType) of
+ {constructed,bif} ->
+ case insert_once(check_functions,
+ {list2name([Cname|Name] ++ [check]),Type}) of
+ true ->
+ lookahead_innertype([Cname|Name],InType,Type);
+ _ ->
+ ok
+ end;
+ #'Externaltypereference'{module=RefMod,type=RefName} ->
+ Typedef = asn1_db:dbget(RefMod,RefName),
+ RefType = Typedef#typedef.typespec,
+ case insert_once(check_functions,{list2name([RefName,check]),
+ RefType}) of
+ true ->
+ lookahead_innertype([RefName],InType,RefType);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end,
+ lookahead_components(Name,Cs).
+
+lookahead_sof(Name,SOF,SOFType) ->
+ Type = case SOFType#type.def of
+ {_,_Type} -> _Type;
+ _Type -> _Type
+ end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ %% this is if a constructed type is defined in
+ %% the SEQUENCE OF type
+ NameList = [SOF|Name],
+ insert_once(check_functions,
+ {list2name(NameList ++ [check]),Type}),
+ lookahead_innertype(NameList,InnerType,Type);
+ #'Externaltypereference'{module=M,type=T} ->
+ Typedef = asn1_db:dbget(M,T),
+ RefType = Typedef#typedef.typespec,
+ InType = get_inner(RefType#type.def),
+ case insert_once(check_functions,
+ {list2name([T,check]),RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end.
+
+
+insert_once(Table,Object) ->
+ case ets:lookup(Table,element(1,Object)) of
+ [] ->
+ ets:insert(Table,Object); %returns true
+ _ -> false
+ end.
+
+unify_if_string(PrimType) ->
+ case PrimType of
+ 'NumericString' ->
+ restrictedstring;
+ 'PrintableString' ->
+ restrictedstring;
+ 'TeletexString' ->
+ restrictedstring;
+ 'VideotexString' ->
+ restrictedstring;
+ 'IA5String' ->
+ restrictedstring;
+ 'UTCTime' ->
+ restrictedstring;
+ 'GeneralizedTime' ->
+ restrictedstring;
+ 'GraphicString' ->
+ restrictedstring;
+ 'VisibleString' ->
+ restrictedstring;
+ 'GeneralString' ->
+ restrictedstring;
+ 'UniversalString' ->
+ restrictedstring;
+ 'BMPString' ->
+ restrictedstring;
+ Other -> Other
+ end.
+
+
+
+
+
+get_inner(A) when atom(A) -> A;
+get_inner(Ext) when record(Ext,'Externaltypereference') -> Ext;
+get_inner(Tref) when record(Tref,typereference) -> Tref;
+get_inner({fixedtypevaluefield,_,Type}) ->
+ if
+ record(Type,type) ->
+ get_inner(Type#type.def);
+ true ->
+ get_inner(Type)
+ end;
+get_inner({typefield,TypeName}) ->
+ TypeName;
+get_inner(#'ObjectClassFieldType'{type=Type}) ->
+% get_inner(Type);
+ Type;
+get_inner(T) when tuple(T) ->
+ case element(1,T) of
+ Tuple when tuple(Tuple),element(1,Tuple) == objectclass ->
+ case catch(lists:last(element(2,T))) of
+ {valuefieldreference,FieldName} ->
+ get_fieldtype(element(2,Tuple),FieldName);
+ {typefieldreference,FieldName} ->
+ get_fieldtype(element(2,Tuple),FieldName);
+ {'EXIT',Reason} ->
+ throw({asn1,{'internal error in get_inner/1',Reason}})
+ end;
+ _ -> element(1,T)
+ end.
+
+
+
+
+
+type(X) when record(X,'Externaltypereference') ->
+ X;
+type(X) when record(X,typereference) ->
+ X;
+type('ASN1_OPEN_TYPE') ->
+ 'ASN1_OPEN_TYPE';
+type({fixedtypevaluefield,_Name,Type}) when record(Type,type) ->
+ type(get_inner(Type#type.def));
+type({typefield,_}) ->
+ 'ASN1_OPEN_TYPE';
+type(X) ->
+ %% io:format("asn1_types:type(~p)~n",[X]),
+ case catch type2(X) of
+ {'EXIT',_} ->
+ {notype,X};
+ Normal ->
+ Normal
+ end.
+
+type2(X) ->
+ case prim_bif(X) of
+ true ->
+ {primitive,bif};
+ false ->
+ case construct_bif(X) of
+ true ->
+ {constructed,bif};
+ false ->
+ {undefined,user}
+ end
+ end.
+
+prim_bif(X) ->
+ lists:member(X,['INTEGER' ,
+ 'ENUMERATED',
+ 'OBJECT IDENTIFIER',
+ 'ANY',
+ 'NULL',
+ 'BIT STRING' ,
+ 'OCTET STRING' ,
+ 'ObjectDescriptor',
+ 'NumericString',
+ 'TeletexString',
+ 'VideotexString',
+ 'UTCTime',
+ 'GeneralizedTime',
+ 'GraphicString',
+ 'VisibleString',
+ 'GeneralString',
+ 'PrintableString',
+ 'IA5String',
+ 'UniversalString',
+ 'BMPString',
+ 'ENUMERATED',
+ 'BOOLEAN']).
+
+construct_bif(T) ->
+ lists:member(T,['SEQUENCE' ,
+ 'SEQUENCE OF' ,
+ 'CHOICE' ,
+ 'SET' ,
+ 'SET OF']).
+
+def_to_tag(#tag{class=Class,number=Number}) ->
+ {Class,Number};
+def_to_tag(#'ObjectClassFieldType'{type=Type}) ->
+ case Type of
+ T when tuple(T),element(1,T)==fixedtypevaluefield ->
+ {'UNIVERSAL',get_inner(Type)};
+ _ ->
+ []
+ end;
+def_to_tag(Def) ->
+ {'UNIVERSAL',get_inner(Def)}.
+
+
+%% Information Object Class
+
+type_from_object(X) ->
+ case (catch lists:last(element(2,X))) of
+ {'EXIT',_} ->
+ {notype,X};
+ Normal ->
+ Normal
+ end.
+
+
+get_fieldtype([],_FieldName)->
+ {no_type,no_name};
+get_fieldtype([Field|Rest],FieldName) ->
+ case element(2,Field) of
+ FieldName ->
+ case element(1,Field) of
+ fixedtypevaluefield ->
+ {element(1,Field),FieldName,element(3,Field)};
+ _ ->
+ {element(1,Field),FieldName}
+ end;
+ _ ->
+ get_fieldtype(Rest,FieldName)
+ end.
+
+get_fieldcategory([],_FieldName) ->
+ no_cat;
+get_fieldcategory([Field|Rest],FieldName) ->
+ case element(2,Field) of
+ FieldName ->
+ element(1,Field);
+ _ ->
+ get_fieldcategory(Rest,FieldName)
+ end.
+
+get_typefromobject(Type) when record(Type,type) ->
+ case Type#type.def of
+ {{objectclass,_,_},TypeFrObj} when list(TypeFrObj) ->
+ {_,FieldName} = lists:last(TypeFrObj),
+ FieldName;
+ _ ->
+ {no_field}
+ end.
+
+get_classfieldcategory(Type,FieldName) ->
+ case (catch Type#type.def) of
+ {{objectclass,Fields,_},_} ->
+ get_fieldcategory(Fields,FieldName);
+ {'EXIT',_} ->
+ no_cat;
+ _ ->
+ no_cat
+ end.
+%% Information Object Class
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Convert a list of name parts to something that can be output by emit
+%%
+%% used to output function names in generated code.
+
+list2name(L) ->
+ NewL = list2name1(L),
+ lists:concat(lists:reverse(NewL)).
+
+list2name1([{ptype,H1},H2|T]) ->
+ [H1,"_",list2name([H2|T])];
+list2name1([H1,H2|T]) ->
+ [H1,"_",list2name([H2|T])];
+list2name1([{ptype,H}|_T]) ->
+ [H];
+list2name1([H|_T]) ->
+ [H];
+list2name1([]) ->
+ [].
+
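+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): name parts are accumulated innermost-first, so the reversal
+%% in list2name/1 puts the outermost type name first in the generated
+%% function name.
+list2name_example() ->
+    "MyType_SeqIn" = list2name(['SeqIn','MyType']),
+    "MyType_SeqIn_check" = list2name([check,'SeqIn','MyType']),
+    ok.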
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Convert a list of name parts to something that can be output by emit
+%% stops at {ptype,Pname}, i.e. Pname will be the first part of the name
+%% used to output record names in generated code.
+
+list2rname(L) ->
+ NewL = list2rname1(L),
+ lists:concat(lists:reverse(NewL)).
+
+list2rname1([{ptype,H1},_H2|_T]) ->
+ [H1];
+list2rname1([H1,H2|T]) ->
+ [H1,"_",list2name([H2|T])];
+list2rname1([{ptype,H}|_T]) ->
+ [H];
+list2rname1([H|_T]) ->
+ [H];
+list2rname1([]) ->
+ [].
+
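+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): unlike list2name/1, list2rname/1 stops at the {ptype,Pname}
+%% part, so the record name of a parameterized type does not include the
+%% enclosing type names.
+list2rname_example() ->
+    "PType"     = list2rname([{ptype,'PType'},'Top']),
+    "Top_PType" = list2name([{ptype,'PType'},'Top']),
+    ok.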
+
+
+constructed_suffix(_,#'SEQUENCE'{pname=Ptypename}) when Ptypename =/= false ->
+ {ptype, Ptypename};
+constructed_suffix(_,#'SET'{pname=Ptypename}) when Ptypename =/= false ->
+ {ptype,Ptypename};
+constructed_suffix('SEQUENCE OF',_) ->
+ 'SEQOF';
+constructed_suffix('SET OF',_) ->
+ 'SETOF'.
+
+erule(ber) ->
+ ber;
+erule(ber_bin) ->
+ ber;
+erule(ber_bin_v2) ->
+ ber_bin_v2;
+erule(per) ->
+ per;
+erule(per_bin) ->
+ per.
+
+wrap_ber(ber) ->
+ ber_bin;
+wrap_ber(Erule) ->
+ Erule.
+
+rt2ct_suffix() ->
+ Options = get(encoding_options),
+ case {lists:member(optimize,Options),lists:member(per_bin,Options)} of
+ {true,true} -> "_rt2ct";
+ _ -> ""
+ end.
+rt2ct_suffix(per_bin) ->
+ Options = get(encoding_options),
+ case lists:member(optimize,Options) of
+ true -> "_rt2ct";
+ _ -> ""
+ end;
+rt2ct_suffix(_) -> "".
+
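+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): the "_rt2ct" suffix is only selected when both per_bin and
+%% optimize are present in the encoding options.
+rt2ct_suffix_example() ->
+    put(encoding_options, [per_bin, optimize]),
+    "_rt2ct" = rt2ct_suffix(),
+    "_rt2ct" = rt2ct_suffix(per_bin),
+    ""       = rt2ct_suffix(ber_bin),
+    erase(encoding_options),
+    ok.
+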
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V;
+ {value,Cnstr} ->
+ Cnstr
+ end.
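+
+%% Illustrative usage sketch (assumed example; the constraint terms are
+%% hypothetical and this function is not part of the original module):
+%% get_constraint/2 returns 'no' for a missing key, the bare value for a
+%% {Key,Value} pair, and the whole tuple for any other constraint shape.
+get_constraint_example() ->
+    Cs = [{'SizeConstraint',{1,10}},{'SomeConstraint',a,b}],
+    {1,10} = get_constraint(Cs, 'SizeConstraint'),
+    {'SomeConstraint',a,b} = get_constraint(Cs, 'SomeConstraint'),
+    no = get_constraint(Cs, 'ValueRange'),
+    ok.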
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber.erl
new file mode 100644
index 0000000000..f063dff765
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber.erl
@@ -0,0 +1,1525 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_ber.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_gen_ber).
+
+%% Generate an Erlang module which handles BER encode and decode for
+%% all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+
+-export([pgen/4]).
+-export([decode_class/1, decode_type/1]).
+-export([add_removed_bytes/0]).
+-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
+-export([gen_encode_prim/4]).
+-export([gen_dec_prim/8]).
+-export([gen_objectset_code/2, gen_obj_code/3]).
+-export([re_wrap_erule/1]).
+-export([unused_var/2]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+ % the encoding of class of tag bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+ % primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+
+-define(T_ObjectDescriptor, ?UNIVERSAL bor ?PRIMITIVE bor 7).
+ % restricted character string types
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+
+%% pgen(Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList,PTypeList}
+%% TypeList = ValueList = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate ENCODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+
+%%===============================================================================
+%% encode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+ ", ObjFun";
+ false ->
+ ""
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([nl,nl,nl,"%%================================"]),
+ emit([nl,"%% ",asn1ct_gen:list2name(Typename)]),
+ emit([nl,"%%================================",nl]),
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,
+ ") when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn",ObjFun,");",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}, TagIn",ObjFun,") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,");",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,") ->",nl," "]),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end;
+
+%%===============================================================================
+%% encode ComponentType
+%%===============================================================================
+
+gen_encode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_,_}) ->
+ NewTname = [Cname|Tname],
+ %% The tag is set to [] to avoid it being taken into account twice,
+ %% both as a component/alternative (passed as an argument to the
+ %% encode/decode function) and within the encode/decode function
+ %% itself.
+ NewType = Type#type{tag=[]},
+ gen_encode(Erules,NewTname,NewType).
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Type = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ emit([nl,nl,"%%================================"]),
+ emit([nl,"%% ",Typename]),
+ emit([nl,"%%================================",nl]),
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn) when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn);",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),"',Val}, TagIn) ->",nl}),
+ emit({" 'enc_",asn1ct_gen:list2name(Typename),"'(Val, TagIn);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(",
+ unused_var("Val",Type#type.def),", TagIn) ->",nl}),
+ CurrentMod = get(currmod),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ {primitive,bif} ->
+ asn1ct_gen_ber:gen_encode_prim(ber,Type,["TagIn ++ ",
+ {asis,Tag}],"Val"),
+ emit([".",nl]);
+ #typereference{val=Ename} ->
+ emit([" 'enc_",Ename,"'(Val, TagIn ++ ",{asis,Tag},").",nl]);
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, TagIn ++ ",
+ {asis,Tag},").",nl]);
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, TagIn ++ ",
+ {asis,Tag},").",nl]);
+ 'ASN1_OPEN_TYPE' ->
+ emit(["%% OPEN TYPE",nl]),
+ asn1ct_gen_ber:gen_encode_prim(ber,
+ Type#type{def='ASN1_OPEN_TYPE'},
+ ["TagIn ++ ",
+ {asis,Tag}],"Val"),
+ emit([".",nl])
+ end.
+
+unused_var(Var,#'SEQUENCE'{components=Cl}) ->
+ unused_var1(Var,Cl);
+unused_var(Var,#'SET'{components=Cl}) ->
+ unused_var1(Var,Cl);
+unused_var(Var,_) ->
+ Var.
+unused_var1(Var,Cs) when Cs == []; Cs == {[],[]} ->
+ lists:concat(["_",Var]);
+unused_var1(Var,_) ->
+ Var.
+
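+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): the value variable is prefixed with "_" when the SEQUENCE or
+%% SET has no components, so the generated clause compiles without an
+%% unused-variable warning.
+unused_var_example() ->
+    "_Val" = unused_var("Val", #'SEQUENCE'{components=[]}),
+    "Val"  = unused_var("Val", 'INTEGER'),
+    ok.
+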
+unused_optormand_var(Var,Def) ->
+ case asn1ct_gen:type(asn1ct_gen:get_inner(Def)) of
+ 'ASN1_OPEN_TYPE' ->
+ lists:concat(["_",Var]);
+ _ ->
+ Var
+ end.
+
+
+gen_encode_prim(_Erules,D,DoTag,Value) when record(D,type) ->
+
+%%% The constraint is currently not used for BER (except for BIT STRING)
+%%% and is therefore replaced with [] as a placeholder
+ BitStringConstraint = D#type.constraint,
+ Constraint = [],
+ asn1ct_name:new(enumval),
+ case D#type.def of
+ 'BOOLEAN' ->
+ emit_encode_func('boolean',Value,DoTag);
+ 'INTEGER' ->
+ emit_encode_func('integer',Constraint,Value,DoTag);
+ {'INTEGER',NamedNumberList} ->
+ emit_encode_func('integer',Constraint,Value,
+ NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList={_,_}} ->
+
+ emit(["case (case ",Value," of {asn1_enum,_}->",Value,";{_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList} ->
+
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+
+ {'BIT STRING',NamedNumberList} ->
+ emit_encode_func('bit_string',BitStringConstraint,Value,
+ NamedNumberList,DoTag);
+ 'ANY' ->
+ emit_encode_func('open_type', Value,DoTag);
+ 'NULL' ->
+ emit_encode_func('null',Value,DoTag);
+ 'OBJECT IDENTIFIER' ->
+ emit_encode_func("object_identifier",Value,DoTag);
+ 'ObjectDescriptor' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_ObjectDescriptor,DoTag);
+ 'OCTET STRING' ->
+ emit_encode_func('octet_string',Constraint,Value,DoTag);
+ 'NumericString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_NumericString,DoTag);
+ 'TeletexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_TeletexString,DoTag);
+ 'VideotexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VideotexString,DoTag);
+ 'GraphicString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GraphicString,DoTag);
+ 'VisibleString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VisibleString,DoTag);
+ 'GeneralString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GeneralString,DoTag);
+ 'PrintableString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_PrintableString,DoTag);
+ 'IA5String' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_IA5String,DoTag);
+ 'UniversalString' ->
+ emit_encode_func('universal_string',Constraint,Value,DoTag);
+ 'BMPString' ->
+ emit_encode_func('BMP_string',Constraint,Value,DoTag);
+ 'UTCTime' ->
+ emit_encode_func('utc_time',Constraint,Value,DoTag);
+ 'GeneralizedTime' ->
+ emit_encode_func('generalized_time',Constraint,Value,DoTag);
+ 'ASN1_OPEN_TYPE' ->
+ emit_encode_func('open_type', Value,DoTag);
+ XX ->
+ exit({'can not encode' ,XX})
+ end.
+
+
+emit_encode_func(Name,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Value,Tags);
+emit_encode_func(Name,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Tags);
+emit_encode_func(Name,Constraint,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Asis,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Asis,Tags);
+emit_encode_func(Name,Constraint,Value,Asis,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,
+ ", ",{asis,Asis},
+ ", ",Tags,")"]).
+
+emit_enc_enumerated_cases({L1,L2}, Tags) ->
+ emit_enc_enumerated_cases(L1++L2, Tags, ext);
+emit_enc_enumerated_cases(L, Tags) ->
+ emit_enc_enumerated_cases(L, Tags, noext).
+
+emit_enc_enumerated_cases([{EnumName,EnumVal},H2|T], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+ emit_enc_enumerated_cases([H2|T], Tags, Ext);
+emit_enc_enumerated_cases([{EnumName,EnumVal}], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+ case Ext of
+ noext -> emit([";",nl]);
+ ext ->
+ emit([";",nl,"{asn1_enum,",{curr,enumval},"} -> ",
+ "?RT_BER:encode_enumerated(",{curr,enumval},",",Tags,");",nl]),
+ asn1ct_name:new(enumval)
+ end,
+ emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
+ emit([nl,"end"]).
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate DECODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% decode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ D = Type,
+ emit({nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes, OptOrMand) ->",nl}),
+ emit({" 'dec_",Type#typedef.name,"'(Bytes, OptOrMand, []).",nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes, ",
+ unused_optormand_var("OptOrMand",(Type#typedef.typespec)#type.def),", TagIn) ->",nl}),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,D).
+
+
+%%===============================================================================
+%% decode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+gen_decode(Erules,Tname,Type) when record(Type,type) ->
+ Typename = Tname,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({"'dec_",asn1ct_gen:list2name(Typename),"'(Bytes, OptOrMand, TagIn",ObjFun,") ->",nl}),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end;
+
+
+%%===============================================================================
+%% decode ComponentType
+%%===============================================================================
+
+gen_decode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_,_}) ->
+ NewTname = [Cname|Tname],
+ %% The tag is set to [] to avoid it being taken into account twice,
+ %% both as a component/alternative (passed as an argument to the
+ %% encode/decode function) and within the encode/decode function
+ %% itself.
+ NewType = Type#type{tag=[]},
+ gen_decode(Erules,NewTname,NewType).
+
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ InnerTag = Def#type.tag ,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- InnerTag],
+ case asn1ct_gen:type(InnerType) of
+ 'ASN1_OPEN_TYPE' ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ asn1ct_name:new(len),
+ gen_dec_prim(Erules, Def#type{def='ASN1_OPEN_TYPE'},
+ BytesVar, Tag, "TagIn",no_length,
+ ?PRIMITIVE,"OptOrMand"),
+ emit({".",nl,nl});
+ {primitive,bif} ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ asn1ct_name:new(len),
+ gen_dec_prim(Erules, Def, BytesVar, Tag, "TagIn",no_length,
+ ?PRIMITIVE,"OptOrMand"),
+ emit({".",nl,nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ TheType ->
+ DecFunName = mkfuncname(TheType,dec),
+ emit({DecFunName,"(",{curr,bytes},
+ ", OptOrMand, TagIn++",{asis,Tag},")"}),
+ emit({".",nl,nl})
+ end.
+
+
+gen_dec_prim(Erules,Att,BytesVar,DoTag,TagIn,Length,_Form,OptOrMand) ->
+ Typename = Att#type.def,
+%% Currently not used for BER; replaced with [] as a placeholder
+%% Constraint = Att#type.constraint,
+%% Constraint = [],
+ Constraint =
+ case get_constraint(Att#type.constraint,'SizeConstraint') of
+ no -> [];
+ Tc -> Tc
+ end,
+ ValueRange =
+ case get_constraint(Att#type.constraint,'ValueRange') of
+ no -> [];
+ Tv -> Tv
+ end,
+ SingleValue =
+ case get_constraint(Att#type.constraint,'SingleValue') of
+ no -> [];
+ Sv -> Sv
+ end,
+ AsBin = case get(binary_strings) of
+ true -> "_as_bin";
+ _ -> ""
+ end,
+ NewTypeName = case Typename of
+ 'ANY' -> 'ASN1_OPEN_TYPE';
+ _ -> Typename
+ end,
+ DoLength =
+ case NewTypeName of
+ 'BOOLEAN'->
+ emit({"?RT_BER:decode_boolean(",BytesVar,","}),
+ false;
+ 'INTEGER' ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},","}),
+ false;
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},",",
+ {asis,NamedNumberList},","}),
+ false;
+ {'ENUMERATED',NamedNumberList} ->
+ emit({"?RT_BER:decode_enumerated(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ false;
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_BER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},","});
+ _ ->
+ emit({"?RT_BER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","})
+ end,
+ true;
+ 'NULL' ->
+ emit({"?RT_BER:decode_null(",BytesVar,","}),
+ false;
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_BER:decode_object_identifier(",BytesVar,","}),
+ false;
+ 'ObjectDescriptor' ->
+ emit({"?RT_BER:decode_restricted_string(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_ObjectDescriptor},","}),
+ true;
+ 'OCTET STRING' ->
+ emit({"?RT_BER:decode_octet_string",AsBin,"(",BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'NumericString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_NumericString},","}),
+ true;
+ 'TeletexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_TeletexString},","}),
+ true;
+ 'VideotexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VideotexString},","}),
+ true;
+ 'GraphicString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GraphicString},","}),
+ true;
+ 'VisibleString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VisibleString},","}),
+ true;
+ 'GeneralString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GeneralString},","}),
+ true;
+ 'PrintableString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_PrintableString},","}),
+ true;
+ 'IA5String' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_IA5String},","}),
+ true;
+ 'UniversalString' ->
+ emit({"?RT_BER:decode_universal_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'BMPString' ->
+ emit({"?RT_BER:decode_BMP_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'UTCTime' ->
+ emit({"?RT_BER:decode_utc_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'GeneralizedTime' ->
+ emit({"?RT_BER:decode_generalized_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'ASN1_OPEN_TYPE' ->
+ emit(["?RT_BER:decode_open_type(",re_wrap_erule(Erules),",",
+ BytesVar,","]),
+ false;
+ Other ->
+ exit({'can not decode' ,Other})
+ end,
+
+ NewLength = case DoLength of
+ true -> [", ", Length];
+ false -> ""
+ end,
+ NewOptOrMand = case OptOrMand of
+ _ when list(OptOrMand) -> OptOrMand;
+ mandatory -> {asis,mandatory};
+ _ -> {asis,opt_or_default}
+ end,
+ case {TagIn,NewTypeName} of
+ {[],'ASN1_OPEN_TYPE'} ->
+ emit([{asis,DoTag},")"]);
+ {_,'ASN1_OPEN_TYPE'} ->
+ emit([TagIn,"++",{asis,DoTag},")"]);
+ {[],_} ->
+ emit([{asis,DoTag},NewLength,", ",NewOptOrMand,")"]);
+ _ when list(TagIn) ->
+ emit([TagIn,"++",{asis,DoTag},NewLength,", ",NewOptOrMand,")"])
+ end.
+
+
+int_constr([],[]) ->
+ [];
+int_constr([],ValueRange) ->
+ ValueRange;
+int_constr(SingleValue,[]) ->
+ SingleValue;
+int_constr(SV,VR) ->
+ [SV,VR].
+
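+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): int_constr/2 combines the single-value and value-range
+%% constraints handed to the runtime decode functions.
+int_constr_example() ->
+    []         = int_constr([], []),
+    {1,10}     = int_constr([], {1,10}),
+    5          = int_constr(5, []),
+    [5,{1,10}] = int_constr(5, {1,10}),
+    ok.
+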
+%% Object code generation for encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=M,type=ClName} = Def#'Object'.classname,
+ Class = asn1_db:dbget(M,ClName),
+
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+ gen_encode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+ gen_decode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed);
+gen_obj_code(_Erules,_Module,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Args,", _RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, TagIn, _RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_, _"),
+ emit([" {[],0}"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val, TagIn"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+ %% A specified field overwrites any 'DEFAULT' or
+ %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val, TagIn"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, TagIn, [H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause(" Val, TagIn, [H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, TagIn, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, TagIn, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_encode_objectfields(ClassName,[_|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+% MaybeConstr=
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% OTag = Def#type.tag,
+% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, TagIn, RestPrimFieldName) ->",nl}),
+% CAcc=
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_encode_prim(ber,Def,["TagIn ++ ",{asis,Tag}],
+% "Val"),
+% [];
+% {constructed,bif} ->
+% %%InnerType = asn1ct_gen:get_inner(Def#type.def),
+% %%asn1ct_gen:gen_encode_constructed(ber,[ObjName],
+% %% InnerType,Def);
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val, TagIn ++ ",{asis,Tag},")"}),
+% [{['enc_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,
+% "'(Val, TagIn ++ ",{asis,Tag},")"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val, TagIn ++ ",
+% {asis,Tag},")"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, TagIn, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, TagIn, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, TagIn, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_encode_objectfields(C,O,[H|T],Acc) ->
+% gen_encode_objectfields(C,O,T,Acc);
+% gen_encode_objectfields(_,_,[],Acc) ->
+% Acc.
+
+% gen_encode_constr_type([{Name,Def}|Rest]) ->
+% emit({Name,"(Val,TagIn) ->",nl}),
+% InnerType = asn1ct_gen:get_inner(Def#type.def),
+% asn1ct_gen:gen_encode_constructed(ber,Name,InnerType,Def),
+% gen_encode_constr_type(Rest);
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ -> gen_encode_user(Erules,TypeDef)
+ end,
+ gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+ gen_encode_prim(ber,Def,["TagIn ++ ",{asis,Tag}],
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val, TagIn ++",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val, TagIn ++ ",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val, TagIn ++ ",{asis,Tag},")"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Bytes, TagIn ++ ",
+ {asis,Tag},")"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(ber,Type,["TagIn ++ ",{asis,Tag}],"Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, TagIn ++ ",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, TagIn ++ ",{asis,Tag},")",nl]),
+ []
+ end.
+
+
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Args,"_) ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes, TagIn, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_, _,"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes, TagIn,"),
+ gen_decode_default_call(ClassName,Name,"Bytes",DefaultType);
+ {{Name,TypeSpec},_} ->
+ %% A specified field overwrites any 'DEFAULT' or
+ %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes, TagIn,"),
+ gen_decode_field_call(ObjName,Name,"Bytes",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,TagIn,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,TagIn,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, TagIn, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, TagIn, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+
+
+% gen_decode_objectfields(Erules,Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Bytes, TagIn, RestPrimFieldName) ->",nl}),
+% OTag = Def#type.tag,
+% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% Prop =
+% case get_optionalityspec(Fields,FieldName) of
+% 'OPTIONAL' -> opt_or_default;
+% {'DEFAULT',_} -> opt_or_default;
+% _ -> mandatory
+% end,
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_dec_prim(Erules,Def,"Bytes",Tag,"TagIn",no_length,
+% ?PRIMITIVE,Prop),
+% [];
+% {constructed,bif} ->
+% emit({" 'dec_",ObjName,'_',FieldName,"'(Bytes,",
+% {asis,Prop},", TagIn ++ ",{asis,Tag},")"}),
+% [{['dec_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'dec_",TypeName,"'(Bytes, ",
+% {asis,Prop},", TagIn ++ ",{asis,Tag},")"}),
+% [];
+% TypeName ->
+% emit({" 'dec_",TypeName,"'(Bytes, ",{asis,Prop},
+% ", TagIn ++ ",{asis,Tag},")"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Bytes, TagIn, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+% "'(H, Bytes, TagIn, T)"});
+% TypeName ->
+% emit({indent(3),"'dec_",TypeName,
+% "'(H, Bytes, TagIn, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} ->
+% []
+% end,
+% gen_decode_objectfields(Erules,Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_decode_objectfields(Erules,C,O,[H|T],CAcc) ->
+% gen_decode_objectfields(Erules,C,O,T,CAcc);
+% gen_decode_objectfields(_,_,_,[],CAcc) ->
+% CAcc.
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+%% emit({Name,"(Bytes, OptOrMand) ->",nl}),
+%% emit({" ",Name,"(Bytes, OptOrMand, []).",nl,nl}),
+ emit({Name,"(Bytes, OptOrMand, TagIn) ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(ber,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+ gen_dec_prim(ber,Def,Bytes,Tag,"TagIn",no_length,
+ ?PRIMITIVE,opt_or_default),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",opt_or_default, TagIn ++ ",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,", opt_or_default,TagIn ++ ",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,
+ ", opt_or_default,TagIn ++ ",{asis,Tag},")"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,
+ ",opt_or_default, TagIn ++ ",{asis,Tag},")"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(ber,Type,Bytes,Tag,"TagIn",no_length,
+ ?PRIMITIVE,opt_or_default),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes,
+ " ,opt_or_default, TagIn ++ ",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,
+ ", opt_or_defualt, TagIn ++ ",{asis,Tag},")",nl]),
+ []
+ end.
+
+
+more_genfields([]) ->
+ false;
+more_genfields([Field|Fields]) ->
+ case element(1,Field) of
+ typefield ->
+ true;
+ objectfield ->
+ true;
+ _ ->
+ more_genfields(Fields)
+ end.
+
+
+
+%% Object Set code generation for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = (ClassDef#classdef.typespec)#objectclass.fields,
+ InternalFuncs=gen_objset_enc(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1,[]),
+ gen_objset_dec(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+%% gen_objset_enc iterates over the objects of the object set
+gen_objset_enc(_,{unique,undefined},_,_,_,_,_) ->
+ %% There is no unique field in the class of this object set;
+ %% don't bother about the constraint
+ [];
+gen_objset_enc(ObjSName,UniqueName,
+ [{ObjName,Val,Fields},T|Rest],ClName,ClFields,NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/4"}),
+ {[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc ++ Acc);
+gen_objset_enc(ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",{asis,Val},") ->",nl}),
+ {InternalFunc,_}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/4"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc ++ Acc;
+%% See X.681 Annex E for the following case
+gen_objset_enc(ObjSetName,_UniqueName,['EXTENSIONMARK'],
+ _ClName,_ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_Attr, Val, _TagIn, _RestPrimFieldName) ->",nl}),
+ emit({indent(6),"Len = case Val of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Val)",nl,indent(6),"end,"}),
+ emit({indent(6),"{Val,Len}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% gen_inlined_enc_funs for each object iterates over all fields of a
+%% class, and for each typefield it checks if the object has that
+%% field and emits the proper code.
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, TagIn, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N} = emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, TagIn, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N} = emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_H|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+
+emit_inner_of_fun(TDef = #typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(ber,Type,["TagIn ++ ",{asis,Tag}],"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val,TagIn ++ ",
+ {asis,Tag},")"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val, TagIn ++ ",
+ {asis,Tag},")"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name,typespec=Type},_) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ emit({indent(12),"'enc_",Name,"'(Val, TagIn ++ ",{asis,Tag},")"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_encode_prim(ber,Type,["TagIn ++ ",{asis,Tag}],"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val, TagIn ++ ",{asis,Tag},")"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val, TagIn ++ ",{asis,Tag},")"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val, TagIn ++ ",{asis,Tag},")"})
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
+ %% There is no unique field in the class of this object set;
+ %% don't bother about the constraint
+ ok;
+gen_objset_dec(Erules,ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj)->
+ emit({"'getdec_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Erules,Fields,ClFields,ObjSName,
+ NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"}),
+ NthObj
+ end,
+ emit({";",nl}),
+ gen_objset_dec(Erules,ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj);
+gen_objset_dec(Erules,ObjSetName,UniqueName,[{ObjName,Val,Fields}],_ClName,
+ ClFields,NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(",{asis,UniqueName},",",{asis,Val},") ->",nl}),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Erules,Fields,ClFields,ObjSetName,
+ NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"})
+ end,
+ emit({".",nl,nl});
+gen_objset_dec(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,_ClFields,
+ _NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Bytes, _, _) ->",nl}),
+ emit({indent(6),"Len = case Bytes of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Bytes)",nl,indent(6),"end,"}),
+ emit({indent(6),"{Bytes,[],Len}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ ok;
+gen_objset_dec(_,_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Erules,Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Bytes, TagIn, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl}),
+ N=emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Bytes, TagIn, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ N=emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Erules,Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Erules,Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Erules,Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Erules,Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Erules,Fields,[_H|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,_,[],_,NthObj) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ NthObj.
+
+emit_inner_of_decfun(Erules,#typedef{name={ExtName,Name},typespec=Type},
+ Prop,InternalDefFunName) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(Erules,Type,"Bytes",Tag,"TagIn",no_length,
+ ?PRIMITIVE,Prop),
+ 0;
+ {constructed,bif} ->
+ emit({indent(12),"'dec_",
+ asn1ct_gen:list2name(InternalDefFunName),"'(Bytes, ",Prop,
+ ", TagIn ++ ",{asis,Tag},")"}),
+ 1;
+ _ ->
+ emit({indent(12),"'",ExtName,"':'dec_",Name,"'(Bytes, ",Prop,
+ ", TagIn ++ ",{asis,Tag},")"}),
+ 0
+ end;
+emit_inner_of_decfun(_,#typedef{name=Name,typespec=Type},Prop,_) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ emit({indent(12),"'dec_",Name,"'(Bytes, ",Prop,", TagIn ++ ",
+ {asis,Tag},")"}),
+ 0;
+emit_inner_of_decfun(Erules,Type,Prop,_) when record(Type,type) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ CurrMod = get(currmod),
+ Def = Type#type.def,
+ InnerType = asn1ct_gen:get_inner(Def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ case WhatKind of
+ {primitive,bif} ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_dec_prim(Erules,Type,"Bytes",Tag,"TagIn",no_length,
+ ?PRIMITIVE,Prop);
+% TRef when record(TRef,typereference) ->
+% T = TRef#typereference.val,
+% emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,
+ "'(Bytes, ",Prop,", TagIn ++ ",{asis,Tag},")"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+ T,"'(Bytes, ",Prop,", TagIn ++ ",{asis,Tag},")"})
+ end,
+ 0.
+
+
+gen_internal_funcs(_,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit({"'dec_",TypeDef#typedef.name,"'(Bytes, ",
+ unused_optormand_var("OptOrMand",(TypeDef#typedef.typespec)#type.def),", TagIn) ->",nl}),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+
+decode_class('UNIVERSAL') ->
+ ?UNIVERSAL;
+decode_class('APPLICATION') ->
+ ?APPLICATION;
+decode_class('CONTEXT') ->
+ ?CONTEXT;
+decode_class('PRIVATE') ->
+ ?PRIVATE.
+
+decode_type('BOOLEAN') -> 1;
+decode_type('INTEGER') -> 2;
+decode_type('BIT STRING') -> 3;
+decode_type('OCTET STRING') -> 4;
+decode_type('NULL') -> 5;
+decode_type('OBJECT IDENTIFIER') -> 6;
+decode_type('OBJECT DESCRIPTOR') -> 7;
+decode_type('EXTERNAL') -> 8;
+decode_type('REAL') -> 9;
+decode_type('ENUMERATED') -> 10;
+decode_type('EMBEDDED_PDV') -> 11;
+decode_type('SEQUENCE') -> 16;
+decode_type('SEQUENCE OF') -> 16;
+decode_type('SET') -> 17;
+decode_type('SET OF') -> 17;
+decode_type('NumericString') -> 18;
+decode_type('PrintableString') -> 19;
+decode_type('TeletexString') -> 20;
+decode_type('VideotexString') -> 21;
+decode_type('IA5String') -> 22;
+decode_type('UTCTime') -> 23;
+decode_type('GeneralizedTime') -> 24;
+decode_type('GraphicString') -> 25;
+decode_type('VisibleString') -> 26;
+decode_type('GeneralString') -> 27;
+decode_type('UniversalString') -> 28;
+decode_type('BMPString') -> 30;
+decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
+decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
+
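+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): decode_class/1 maps a tag class name to the value of tag
+%% bits 8 and 7, and decode_type/1 maps a type name to its UNIVERSAL
+%% tag number.
+decode_class_and_type_example() ->
+    0     = decode_class('UNIVERSAL'),
+    16#80 = decode_class('CONTEXT'),
+    16    = decode_type('SEQUENCE'),
+    22    = decode_type('IA5String'),
+    ok.
+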
+add_removed_bytes() ->
+ asn1ct_name:delete(rb),
+ add_removed_bytes(asn1ct_name:all(rb)).
+
+add_removed_bytes([H,T1|T]) ->
+ emit({{var,H},"+"}),
+ add_removed_bytes([T1|T]);
+add_removed_bytes([H|T]) ->
+ emit({{var,H}}),
+ add_removed_bytes(T);
+add_removed_bytes([]) ->
+ true.
+
+mkfuncname(WhatKind,DecOrEnc) ->
+ case WhatKind of
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ CurrMod = get(currmod),
+ case CurrMod of
+ Mod ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ _ ->
+% io:format("CurrMod: ~p, Mod: ~p~n",[CurrMod,Mod]),
+ lists:concat(["'",Mod,"':'",DecOrEnc,"_",EType,"'"])
+ end;
+ #'typereference'{val=EType} ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ 'ASN1_OPEN_TYPE' ->
+ lists:concat(["'",DecOrEnc,"_",WhatKind,"'"])
+
+ end.
+
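+%% Illustrative usage sketch (assumed example; the module and type names
+%% are hypothetical and this function is not part of the original module):
+%% references to the current module yield a local call, references to
+%% other modules yield a remote call.
+mkfuncname_example() ->
+    put(currmod, 'MyMod'),
+    "'dec_T'" = mkfuncname(#'Externaltypereference'{module='MyMod',type='T'}, dec),
+    "'OtherMod':'dec_T'" =
+        mkfuncname(#'Externaltypereference'{module='OtherMod',type='T'}, dec),
+    erase(currmod),
+    ok.
+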
+optionals(L) -> optionals(L,[],1).
+
+optionals([{'EXTENSIONMARK',_,_}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos); % optionals in extension are currently not handled
+optionals([#'ComponentType'{name=Name,prop='OPTIONAL'}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{name=Name,prop={'DEFAULT',_}}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos+1);
+optionals([],Acc,_) ->
+ lists:reverse(Acc).
+
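+%% Illustrative usage sketch (assumed example; the component names are
+%% hypothetical and this function is not part of the original module):
+%% optionals/1 returns {Name,Position} pairs for the OPTIONAL and
+%% DEFAULT components, counting positions over all components.
+optionals_example() ->
+    Comps = [#'ComponentType'{name=a, prop=mandatory},
+             #'ComponentType'{name=b, prop='OPTIONAL'},
+             #'ComponentType'{name=c, prop={'DEFAULT',0}}],
+    [{b,2},{c,3}] = optionals(Comps),
+    ok.
+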
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%% If the original option was ber and it has been wrapped to ber_bin,
+%% turn it back to ber
+re_wrap_erule(ber_bin) ->
+ case get(encoding_options) of
+ Options when list(Options) ->
+ case lists:member(ber,Options) of
+ true -> ber;
+ _ -> ber_bin
+ end;
+ _ -> ber_bin
+ end;
+re_wrap_erule(Erule) ->
+ Erule.
+
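+%% Illustrative usage sketch (assumed example, not part of the original
+%% module): ber_bin is only unwrapped back to ber when the original
+%% encoding options contained ber.
+re_wrap_erule_example() ->
+    put(encoding_options, [ber, optimize]),
+    ber = re_wrap_erule(ber_bin),
+    erase(encoding_options),
+    ber_bin = re_wrap_erule(ber_bin),
+    per = re_wrap_erule(per),
+    ok.
+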
+is_already_generated(Operation,Name) ->
+ case get(class_default_type) of
+ undefined ->
+ put(class_default_type,[{Operation,Name}]),
+ false;
+ GeneratedList ->
+ case lists:member({Operation,Name},GeneratedList) of
+ true ->
+ true;
+ false ->
+ put(class_default_type,[{Operation,Name}|GeneratedList]),
+ false
+ end
+ end.
+
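+%% Illustrative usage sketch (assumed example; the type name is
+%% hypothetical and this function is not part of the original module):
+%% the process dictionary keeps track of which encode/decode functions
+%% have already been emitted, so each is generated only once.
+is_already_generated_example() ->
+    erase(class_default_type),
+    false = is_already_generated(enc, 'MyType'),
+    true  = is_already_generated(enc, 'MyType'),
+    false = is_already_generated(dec, 'MyType'),
+    ok.
+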
+get_class_fields(#classdef{typespec=ObjClass}) ->
+ ObjClass#objectclass.fields;
+get_class_fields(#objectclass{fields=Fields}) ->
+ Fields;
+get_class_fields(_) ->
+ [].
+
+get_object_field(Name,ObjectFields) ->
+ case lists:keysearch(Name,1,ObjectFields) of
+ {value,Field} -> Field;
+ false -> false
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl
new file mode 100644
index 0000000000..be8ae6f8a5
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl
@@ -0,0 +1,1568 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_ber_bin_v2.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_gen_ber_bin_v2).
+
+%% Generate an Erlang module which handles BER encode and decode for
+%% all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+
+-export([pgen/4]).
+-export([decode_class/1, decode_type/1]).
+-export([add_removed_bytes/0]).
+-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
+-export([gen_encode_prim/4]).
+-export([gen_dec_prim/7]).
+-export([gen_objectset_code/2, gen_obj_code/3]).
+-export([encode_tag_val/3]).
+-export([gen_inc_decode/2]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+ % the encoding of class of tag bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+ % primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+
+-define(T_ObjectDescriptor, ?UNIVERSAL bor ?PRIMITIVE bor 7).
+ % restricted character string types
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList,PTypeList}
+%% TypeList = ValueList = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate ENCODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+
+%%===============================================================================
+%% encode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+ ", ObjFun";
+ false ->
+ ""
+ end,
+
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([nl,nl,nl,"%%================================"]),
+ emit([nl,"%% ",asn1ct_gen:list2name(Typename)]),
+ emit([nl,"%%================================",nl]),
+ case length(Typename) of
+ 1 -> % top level type
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",ObjFun,") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, ", {asis,lists:reverse(Type#type.tag)},ObjFun,").",nl,nl]);
+ _ -> % embedded type with constructed name
+ true
+ end,
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,
+ ") when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn",ObjFun,");",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}, TagIn",ObjFun,") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,");",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,") ->",nl," "]),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end;
+
+%%===============================================================================
+%% encode ComponentType
+%%===============================================================================
+
+gen_encode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_Prop,_Tags}) ->
+ NewTname = [Cname|Tname],
+    %% The tag is set to [] to avoid it being taken into account twice,
+    %% both as a component/alternative (where it is passed as an argument
+    %% to the encode/decode function) and within the encode/decode
+    %% function itself.
+ NewType = Type#type{tag=[]},
+ gen_encode(Erules,NewTname,NewType).
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Type = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ emit([nl,nl,"%%================================"]),
+ emit([nl,"%% ",Typename]),
+ emit([nl,"%%================================",nl]),
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, ", {asis,lists:reverse(Tag)},").",nl,nl]),
+
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn) when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn);",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),"',Val}, TagIn) ->",nl}),
+ emit({" 'enc_",asn1ct_gen:list2name(Typename),"'(Val, TagIn);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val, TagIn) ->",nl}),
+ CurrentMod = get(currmod),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ {primitive,bif} ->
+ gen_encode_prim(ber,Type,"TagIn","Val"),
+ emit([".",nl]);
+ #typereference{val=Ename} ->
+ emit([" 'enc_",Ename,"'(Val, TagIn).",nl]);
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, TagIn).",nl]);
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, TagIn).",nl]);
+ 'ASN1_OPEN_TYPE' ->
+ emit(["%% OPEN TYPE",nl]),
+ gen_encode_prim(ber,
+ Type#type{def='ASN1_OPEN_TYPE'},
+ "TagIn","Val"),
+ emit([".",nl])
+ end.
+
+gen_encode_prim(_Erules,D,DoTag,Value) when record(D,type) ->
+
+%%% Constraint is currently not used for BER (except for BitString) and therefore replaced
+%%% with [] as a placeholder
+ BitStringConstraint = D#type.constraint,
+ Constraint = [],
+ asn1ct_name:new(enumval),
+ case D#type.def of
+ 'BOOLEAN' ->
+ emit_encode_func('boolean',Value,DoTag);
+ 'INTEGER' ->
+ emit_encode_func('integer',Constraint,Value,DoTag);
+ {'INTEGER',NamedNumberList} ->
+ emit_encode_func('integer',Constraint,Value,
+ NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList={_,_}} ->
+
+ emit(["case (case ",Value," of {asn1_enum,_}->",Value,";{_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList} ->
+
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+
+ {'BIT STRING',NamedNumberList} ->
+ emit_encode_func('bit_string',BitStringConstraint,Value,
+ NamedNumberList,DoTag);
+ 'ANY' ->
+ emit_encode_func('open_type', Value,DoTag);
+ 'NULL' ->
+ emit_encode_func('null',Value,DoTag);
+ 'OBJECT IDENTIFIER' ->
+ emit_encode_func("object_identifier",Value,DoTag);
+ 'ObjectDescriptor' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_ObjectDescriptor,DoTag);
+ 'OCTET STRING' ->
+ emit_encode_func('octet_string',Constraint,Value,DoTag);
+ 'NumericString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_NumericString,DoTag);
+ 'TeletexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_TeletexString,DoTag);
+ 'VideotexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VideotexString,DoTag);
+ 'GraphicString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GraphicString,DoTag);
+ 'VisibleString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VisibleString,DoTag);
+ 'GeneralString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GeneralString,DoTag);
+ 'PrintableString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_PrintableString,DoTag);
+ 'IA5String' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_IA5String,DoTag);
+ 'UniversalString' ->
+ emit_encode_func('universal_string',Constraint,Value,DoTag);
+ 'BMPString' ->
+ emit_encode_func('BMP_string',Constraint,Value,DoTag);
+ 'UTCTime' ->
+ emit_encode_func('utc_time',Constraint,Value,DoTag);
+ 'GeneralizedTime' ->
+ emit_encode_func('generalized_time',Constraint,Value,DoTag);
+ 'ASN1_OPEN_TYPE' ->
+ emit_encode_func('open_type', Value,DoTag);
+ XX ->
+ exit({'can not encode' ,XX})
+ end.
+
+
+emit_encode_func(Name,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Value,Tags);
+emit_encode_func(Name,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Tags);
+emit_encode_func(Name,Constraint,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Asis,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Asis,Tags);
+emit_encode_func(Name,Constraint,Value,Asis,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,
+ ", ",{asis,Asis},
+ ", ",Tags,")"]).
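+%% For illustration (hypothetical arguments):
+%%   emit_encode_func('boolean',"Val","TagIn")
+%% emits the text
+%%   ?RT_BER:encode_boolean(Val, TagIn)
+%% The 4-argument variant puts the constraint in front of the value, and the
+%% 5-argument variant additionally inserts the string type (e.g.
+%% ?T_IA5String) between the value and the tags.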
+
+emit_enc_enumerated_cases({L1,L2}, Tags) ->
+ emit_enc_enumerated_cases(L1++L2, Tags, ext);
+emit_enc_enumerated_cases(L, Tags) ->
+ emit_enc_enumerated_cases(L, Tags, noext).
+
+emit_enc_enumerated_cases([{EnumName,EnumVal},H2|T], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+ emit_enc_enumerated_cases([H2|T], Tags, Ext);
+emit_enc_enumerated_cases([{EnumName,EnumVal}], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+ case Ext of
+ noext -> emit([";",nl]);
+ ext ->
+ emit([";",nl,"{asn1_enum,",{curr,enumval},"} -> ",
+ "?RT_BER:encode_enumerated(",{curr,enumval},",",Tags,");",nl]),
+ asn1ct_name:new(enumval)
+ end,
+ emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
+ emit([nl,"end"]).
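+%% Together with the case head emitted by gen_encode_prim/4, the generated
+%% expression has roughly this shape (hypothetical enumeration red(0)/blue(1)
+%% with an extension marker; variable names come from asn1ct_name):
+%%   case ... of
+%%       red -> ?RT_BER:encode_enumerated(0,TagIn);
+%%       blue -> ?RT_BER:encode_enumerated(1,TagIn);
+%%       {asn1_enum,Enumval} -> ?RT_BER:encode_enumerated(Enumval,TagIn);
+%%       Enumval1 -> exit({error,{asn1, {enumerated_not_in_range,Enumval1}}})
+%%   end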
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate DECODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% decode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ Def = Type#typedef.typespec,
+ InnerTag = Def#type.tag ,
+
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- InnerTag],
+
+ Prefix =
+ case {asn1ct:get_gen_state_field(active),
+ asn1ct:get_gen_state_field(prefix)} of
+ {true,Pref} -> Pref;
+ _ -> "dec_"
+ end,
+ emit({nl,nl}),
+ emit(["'",Prefix,Type#typedef.name,"'(Tlv) ->",nl]),
+ emit([" '",Prefix,Type#typedef.name,"'(Tlv, ",{asis,Tag},").",nl,nl]),
+ emit(["'",Prefix,Type#typedef.name,"'(Tlv, TagIn) ->",nl]),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,Type).
+
+gen_inc_decode(Erules,Type) when record(Type,typedef) ->
+ Prefix = asn1ct:get_gen_state_field(prefix),
+ emit({nl,nl}),
+ emit(["'",Prefix,Type#typedef.name,"'(Tlv, TagIn) ->",nl]),
+ gen_decode_user(Erules,Type).
+
+%%===============================================================================
+%% decode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+%% This gen_decode is called by the gen_decode/3 that decodes
+%% ComponentType and the type of a SEQUENCE OF/SET OF.
+gen_decode(Erules,Tname,Type) when record(Type,type) ->
+ Typename = Tname,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ Prefix =
+ case asn1ct:get_gen_state_field(active) of
+ true -> "'dec-inc-";
+ _ -> "'dec_"
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit([Prefix,asn1ct_gen:list2name(Typename),"'(Tlv, TagIn",ObjFun,") ->",nl]),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ Rec when record(Rec,'Externaltypereference') ->
+ case {Typename,asn1ct:get_gen_state_field(namelist)} of
+ {[Cname|_],[{Cname,_}|_]} -> %%
+		    %% This referenced type must only be generated once,
+		    %% as an incomplete partial decode. Therefore we have
+		    %% to check whether this function has already been
+		    %% generated.
+ case asn1ct:is_function_generated(Typename) of
+ true ->
+ ok;
+ _ ->
+ asn1ct:generated_refed_func(Typename),
+ #'Externaltypereference'{module=M,type=Name}=Rec,
+ TypeDef = asn1_db:dbget(M,Name),
+ gen_decode(Erules,TypeDef)
+ end;
+ _ ->
+ true
+ end;
+ _ ->
+ true
+ end;
+
+
+%%===============================================================================
+%% decode ComponentType
+%%===============================================================================
+
+gen_decode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_Prop,_Tags}) ->
+ NewTname = [Cname|Tname],
+    %% The tag is set to [] to avoid it being taken into account twice,
+    %% both as a component/alternative (where it is passed as an argument
+    %% to the encode/decode function) and within the encode/decode
+    %% function itself.
+ NewType = Type#type{tag=[]},
+ case {asn1ct:get_gen_state_field(active),
+ asn1ct:get_tobe_refed_func(NewTname)} of
+ {true,{_,NameList}} ->
+ asn1ct:update_gen_state(namelist,NameList),
+	    %% later moved from the tobe_refed_funcs list to the gen_refed_funcs list
+ gen_decode(Erules,NewTname,NewType);
+ {No,_} when No == false; No == undefined ->
+ gen_decode(Erules,NewTname,NewType);
+ _ ->
+ ok
+ end.
+
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ BytesVar = "Tlv",
+ case asn1ct_gen:type(InnerType) of
+ 'ASN1_OPEN_TYPE' ->
+ asn1ct_name:new(len),
+ gen_dec_prim(ber, Def#type{def='ASN1_OPEN_TYPE'},
+ BytesVar,{string,"TagIn"}, [] ,
+ ?PRIMITIVE,"OptOrMand"),
+ emit({".",nl,nl});
+ {primitive,bif} ->
+ asn1ct_name:new(len),
+ gen_dec_prim(ber, Def, BytesVar,{string,"TagIn"},[] ,
+ ?PRIMITIVE,"OptOrMand"),
+ emit([".",nl,nl]);
+ {constructed,bif} ->
+ asn1ct:update_namelist(D#typedef.name),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ TheType ->
+ DecFunName = mkfuncname(TheType,dec),
+ emit([DecFunName,"(",BytesVar,
+ ", TagIn)"]),
+ emit([".",nl,nl])
+ end.
+
+
+gen_dec_prim(_Erules,Att,BytesVar,DoTag,_TagIn,_Form,_OptOrMand) ->
+ Typename = Att#type.def,
+%% Currently not used for BER; replaced with [] as a placeholder
+%% Constraint = Att#type.constraint,
+%% Constraint = [],
+ Constraint =
+ case get_constraint(Att#type.constraint,'SizeConstraint') of
+ no -> [];
+ Tc -> Tc
+ end,
+ ValueRange =
+ case get_constraint(Att#type.constraint,'ValueRange') of
+ no -> [];
+ Tv -> Tv
+ end,
+ SingleValue =
+ case get_constraint(Att#type.constraint,'SingleValue') of
+ no -> [];
+ Sv -> Sv
+ end,
+ AsBin = case get(binary_strings) of
+ true -> "_as_bin";
+ _ -> ""
+ end,
+ NewTypeName = case Typename of
+ 'ANY' -> 'ASN1_OPEN_TYPE';
+ _ -> Typename
+ end,
+% DoLength =
+ case NewTypeName of
+ 'BOOLEAN'->
+ emit({"?RT_BER:decode_boolean(",BytesVar,","}),
+ add_func({decode_boolean,2});
+ 'INTEGER' ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},","}),
+ add_func({decode_integer,3});
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_integer,4});
+ {'ENUMERATED',NamedNumberList} ->
+ emit({"?RT_BER:decode_enumerated(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_enumerated,4});
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_BER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_compact_bit_string,4});
+ _ ->
+ emit({"?RT_BER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_bit_string,4})
+ end;
+ 'NULL' ->
+ emit({"?RT_BER:decode_null(",BytesVar,","}),
+ add_func({decode_null,2});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_BER:decode_object_identifier(",BytesVar,","}),
+ add_func({decode_object_identifier,2});
+ 'ObjectDescriptor' ->
+ emit({"?RT_BER:decode_restricted_string(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_ObjectDescriptor},","}),
+ add_func({decode_restricted_string,4});
+ 'OCTET STRING' ->
+ emit({"?RT_BER:decode_octet_string",AsBin,"(",BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_octet_string,3});
+ 'NumericString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_NumericString},","}),
+ add_func({decode_restricted_string,4});
+ 'TeletexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_TeletexString},","}),
+ add_func({decode_restricted_string,4});
+ 'VideotexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VideotexString},","}),
+ add_func({decode_restricted_string,4});
+ 'GraphicString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GraphicString},","}),
+ add_func({decode_restricted_string,4});
+ 'VisibleString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VisibleString},","}),
+ add_func({decode_restricted_string,4});
+ 'GeneralString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GeneralString},","}),
+ add_func({decode_restricted_string,4});
+ 'PrintableString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_PrintableString},","}),
+ add_func({decode_restricted_string,4});
+ 'IA5String' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_IA5String},","}),
+ add_func({decode_restricted_string,4}) ;
+ 'UniversalString' ->
+ emit({"?RT_BER:decode_universal_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_universal_string,3});
+ 'BMPString' ->
+ emit({"?RT_BER:decode_BMP_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_BMP_string,3});
+ 'UTCTime' ->
+ emit({"?RT_BER:decode_utc_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_utc_time,3});
+ 'GeneralizedTime' ->
+ emit({"?RT_BER:decode_generalized_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_generalized_time,3});
+ 'ASN1_OPEN_TYPE' ->
+ emit(["?RT_BER:decode_open_type_as_binary(",
+ BytesVar,","]),
+ add_func({decode_open_type_as_binary,2});
+ Other ->
+ exit({'can not decode' ,Other})
+ end,
+
+ case {DoTag,NewTypeName} of
+ {{string,TagStr},'ASN1_OPEN_TYPE'} ->
+ emit([TagStr,")"]);
+ {_,'ASN1_OPEN_TYPE'} ->
+ emit([{asis,DoTag},")"]);
+ {{string,TagStr},_} ->
+ emit([TagStr,")"]);
+ _ when list(DoTag) ->
+ emit([{asis,DoTag},")"])
+ end.
+
+
+int_constr([],[]) ->
+ [];
+int_constr([],ValueRange) ->
+ ValueRange;
+int_constr(SingleValue,[]) ->
+ SingleValue;
+int_constr(SV,VR) ->
+ [SV,VR].
+
+%% Object code generation for encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=M,type=ClName} = Def#'Object'.classname,
+ Class = asn1_db:dbget(M,ClName),
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+ gen_encode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+ gen_decode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed),
+ emit_tlv_format_function();
+gen_obj_code(_Erules,_Module,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Arg) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Arg,", _RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit([" {<<>>,0}"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+	    %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause(" Val, [H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+% MaybeConstr=
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, RestPrimFieldName) ->",nl}),
+% CAcc=
+% case Type#typedef.name of
+% {primitive,bif} -> %%tag should be the primitive tag
+% OTag = Def#type.tag,
+% Tag = [encode_tag_val(decode_class(X#tag.class),
+% X#tag.form,X#tag.number)||
+% X <- OTag],
+% gen_encode_prim(ber,Def,{asis,lists:reverse(Tag)},
+% "Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val)"}),
+% [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,
+% "'(Val)"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val,[H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+gen_encode_objectfields(ClassName,[_C|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+% gen_encode_constr_type(Erules,[{Name,Def}|Rest]) ->
+% emit({Name,"(Val,TagIn) ->",nl}),
+% InnerType = asn1ct_gen:get_inner(Def#type.def),
+% asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+% gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ -> gen_encode_user(Erules,TypeDef)
+ end,
+ gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),
+ X#tag.form,X#tag.number)||
+ X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+% OTag = Def#type.tag,
+% Tag = [encode_tag_val(decode_class(X#tag.class),
+% X#tag.form,X#tag.number)||
+% X <- OTag],
+ gen_encode_prim(ber,Def,{asis,lists:reverse(Tag)},
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val,",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val,",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val,",{asis,Tag},")"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Bytes)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(ber,Type,{asis,lists:reverse(Tag)},"Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, ",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, ",{asis,Tag},")",nl]),
+ []
+% 'ASN1_OPEN_TYPE' ->
+% emit(["%% OPEN TYPE",nl]),
+% gen_encode_prim(ber,
+% Type#type{def='ASN1_OPEN_TYPE'},
+% "TagIn","Val"),
+% emit([".",nl])
+ end.
+
+%%%%%%%%%%%%%%%%
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Arg) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Arg,",_) ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause(" _"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes"),
+ emit_tlv_format("Bytes"),
+ gen_decode_default_call(ClassName,Name,"Tlv",DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+	    %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes"),
+ emit_tlv_format("Bytes"),
+ gen_decode_field_call(ObjName,Name,"Tlv",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,[H|T]) ->",nl]),
+% emit_tlv_format("Bytes"),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,[H|T]"),
+% emit_tlv_format("Bytes"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_C|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+emit_tlv_format(Bytes) ->
+    notice_tlv_format_gen(), % note that tlv_format/1 needs to be generated
+ emit([" Tlv = tlv_format(",Bytes,"),",nl]).
+
+notice_tlv_format_gen() ->
+ Module = get(currmod),
+% io:format("Noticed: ~p~n",[Module]),
+ case get(tlv_format) of
+ {done,Module} ->
+ ok;
+ _ -> % true or undefined
+ put(tlv_format,true)
+ end.
+
+emit_tlv_format_function() ->
+ Module = get(currmod),
+% io:format("Tlv formated: ~p",[Module]),
+ case get(tlv_format) of
+ true ->
+% io:format(" YES!~n"),
+ emit_tlv_format_function1(),
+ put(tlv_format,{done,Module});
+ _ ->
+% io:format(" NO!~n"),
+ ok
+ end.
+emit_tlv_format_function1() ->
+ emit(["tlv_format(Bytes) when binary(Bytes) ->",nl,
+ " {Tlv,_}=?RT_BER:decode(Bytes),",nl,
+ " Tlv;",nl,
+ "tlv_format(Bytes) ->",nl,
+ " Bytes.",nl]).
+
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+ emit([Name,"(Tlv, TagIn) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+%%%%%%%%%%%
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number ||
+ X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+ gen_dec_prim(ber,Def,Bytes,Tag,"TagIn",?PRIMITIVE,
+ opt_or_default),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,",",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,",",{asis,Tag},")"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,",",
+ {asis,Tag},")"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',
+ FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(ber,Type,Bytes,Tag,"TagIn",
+ ?PRIMITIVE,opt_or_default),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes, " ,",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,", ",
+ {asis,Tag},")",nl]),
+ []
+% 'ASN1_OPEN_TYPE' ->
+% emit(["%% OPEN TYPE",nl]),
+% gen_encode_prim(ber,
+% Type#type{def='ASN1_OPEN_TYPE'},
+% "TagIn","Val"),
+% emit([".",nl])
+ end.
+%%%%%%%%%%%
+
+is_already_generated(Operation,Name) ->
+ case get(class_default_type) of
+ undefined ->
+ put(class_default_type,[{Operation,Name}]),
+ false;
+ GeneratedList ->
+ case lists:member({Operation,Name},GeneratedList) of
+ true ->
+ true;
+ false ->
+ put(class_default_type,[{Operation,Name}|GeneratedList]),
+ false
+ end
+ end.
+
+more_genfields([]) ->
+ false;
+more_genfields([Field|Fields]) ->
+ case element(1,Field) of
+ typefield ->
+ true;
+ objectfield ->
+ true;
+ _ ->
+ more_genfields(Fields)
+ end.
+
+
+
+
+%% Object Set code generation for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = get_class_fields(ClassDef),
+ InternalFuncs=gen_objset_enc(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassFields,1,[]),
+ gen_objset_dec(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+%% gen_objset_enc iterates over the objects of the object set
+gen_objset_enc(_,_,{unique,undefined},_,_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother about the constraint.
+ [];
+gen_objset_enc(Erules,ObjSName,UniqueName,
+ [{ObjName,Val,Fields},T|Rest],ClName,ClFields,
+ NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(Erules,ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc ++ Acc);
+gen_objset_enc(_,ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,_} =
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc ++ Acc;
+%% See X.681 Annex E for the following case
+gen_objset_enc(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Val, _RestPrimFieldName) ->",nl}),
+ emit({indent(6),"Len = case Val of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Val)",nl,indent(6),"end,"}),
+ emit({indent(6),"{Val,Len}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% For each object, gen_inlined_enc_funs iterates over all fields of the
+%% class; for each typefield it checks whether the object has that field
+%% and emits the proper code.
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+emit_inner_of_fun(TDef=#typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+% remove Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(ber,Type,[{asis,lists:reverse(Tag)}],"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val)"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val)"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name},_) ->
+% OTag = Type#type.tag,
+% remove Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ emit({indent(12),"'enc_",Name,"'(Val)"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+% OTag = Type#type.tag,
+% remove Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ case Type#type.def of
+ Def when atom(Def) ->
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),
+ X#tag.form,X#tag.number)||X <- OTag],
+ emit([indent(9),Def," ->",nl,indent(12)]),
+ gen_encode_prim(ber,Type,{asis,lists:reverse(Tag)},"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit([indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val)"]);
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val)"]);
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val)"])
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother about the constraint.
+ ok;
+gen_objset_dec(Erules,ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj)->
+ emit(["'getdec_",ObjSName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl]),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit([" fun 'dec_",ObjName,"'/3"]),
+ NthObj
+ end,
+ emit([";",nl]),
+ gen_objset_dec(Erules,ObjSName,UniqueName,[T|Rest],ClName,
+ ClFields,NewNthObj);
+gen_objset_dec(_,ObjSetName,UniqueName,[{ObjName,Val,Fields}],
+ _ClName,ClFields,NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl]),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit([" fun 'dec_",ObjName,"'/3"])
+ end,
+ emit([".",nl,nl]),
+ ok;
+gen_objset_dec(Erules,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(_, _) ->",nl]),
+ emit([indent(2),"fun(_,Bytes, _RestPrimFieldName) ->",nl]),
+ case Erules of
+ ber_bin_v2 ->
+ emit([indent(4),"case Bytes of",nl,
+ indent(6),"Bin when binary(Bin) -> ",nl,
+ indent(8),"Bin;",nl,
+ indent(6),"_ ->",nl,
+ indent(8),"?RT_BER:encode(Bytes)",nl,
+ indent(4),"end",nl]);
+ _ ->
+ emit([indent(6),"Len = case Bytes of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Bytes)",nl,indent(6),"end,"]),
+ emit([indent(4),"{Bytes,[],Len}",nl])
+ end,
+ emit([indent(2),"end.",nl,nl]),
+ ok;
+gen_objset_dec(_,_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl]),
+ N=emit_inner_of_decfun(Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl]),
+ emit([indent(9),{asis,Name}," ->",nl]),
+ N=emit_inner_of_decfun(Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit([";",nl]),
+ emit_inner_of_decfun(Type,DecProp,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit([";",nl,indent(9),{asis,Name}," ->",nl]),
+ emit_inner_of_decfun(Type,DecProp,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Fields,[_|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,[],_,NthObj) ->
+ emit([nl,indent(6),"end",nl]),
+ emit([indent(3),"end"]),
+ NthObj.
+
+emit_inner_of_decfun(#typedef{name={ExtName,Name},typespec=Type},Prop,
+ InternalDefFunName) ->
+ OTag = Type#type.tag,
+%% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- OTag],
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(ber,Type,"Bytes",Tag,"TagIn",
+ ?PRIMITIVE,Prop),
+ 0;
+ {constructed,bif} ->
+ emit([indent(12),"'dec_",
+% asn1ct_gen:list2name(InternalDefFunName),"'(Bytes, ",Prop,
+% ", ",{asis,Tag},")"]),
+ asn1ct_gen:list2name(InternalDefFunName),"'(Bytes, ",
+ {asis,Tag},")"]),
+ 1;
+ _ ->
+ emit([indent(12),"'",ExtName,"':'dec_",Name,"'(Bytes)"]),
+ 0
+ end;
+emit_inner_of_decfun(#typedef{name=Name},_Prop,_) ->
+ emit([indent(12),"'dec_",Name,"'(Bytes)"]),
+ 0;
+emit_inner_of_decfun(Type,Prop,_) when record(Type,type) ->
+ OTag = Type#type.tag,
+%% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- OTag],
+ CurrMod = get(currmod),
+ Def = Type#type.def,
+ InnerType = asn1ct_gen:get_inner(Def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ case WhatKind of
+ {primitive,bif} ->
+ emit([indent(9),Def," ->",nl,indent(12)]),
+ gen_dec_prim(ber,Type,"Bytes",Tag,"TagIn",
+ ?PRIMITIVE,Prop);
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),"'dec_",T,
+% "'(Bytes, ",Prop,")"]);
+ "'(Bytes)"]);
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+% T,"'(Bytes, ",Prop,")"])
+ T,"'(Bytes)"])
+ end,
+ 0.
+
+gen_internal_funcs(_,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit([nl,nl,"'dec_",TypeDef#typedef.name,
+% "'(Tlv, OptOrMand, TagIn) ->",nl]),
+ "'(Tlv, TagIn) ->",nl]),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+
+decode_class('UNIVERSAL') ->
+ ?UNIVERSAL;
+decode_class('APPLICATION') ->
+ ?APPLICATION;
+decode_class('CONTEXT') ->
+ ?CONTEXT;
+decode_class('PRIVATE') ->
+ ?PRIVATE.
+
+decode_type('BOOLEAN') -> 1;
+decode_type('INTEGER') -> 2;
+decode_type('BIT STRING') -> 3;
+decode_type('OCTET STRING') -> 4;
+decode_type('NULL') -> 5;
+decode_type('OBJECT IDENTIFIER') -> 6;
+decode_type('OBJECT DESCRIPTOR') -> 7;
+decode_type('EXTERNAL') -> 8;
+decode_type('REAL') -> 9;
+decode_type('ENUMERATED') -> 10;
+decode_type('EMBEDDED_PDV') -> 11;
+decode_type('SEQUENCE') -> 16;
+decode_type('SEQUENCE OF') -> 16;
+decode_type('SET') -> 17;
+decode_type('SET OF') -> 17;
+decode_type('NumericString') -> 18;
+decode_type('PrintableString') -> 19;
+decode_type('TeletexString') -> 20;
+decode_type('VideotexString') -> 21;
+decode_type('IA5String') -> 22;
+decode_type('UTCTime') -> 23;
+decode_type('GeneralizedTime') -> 24;
+decode_type('GraphicString') -> 25;
+decode_type('VisibleString') -> 26;
+decode_type('GeneralString') -> 27;
+decode_type('UniversalString') -> 28;
+decode_type('BMPString') -> 30;
+decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
+decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
+
+add_removed_bytes() ->
+ asn1ct_name:delete(rb),
+ add_removed_bytes(asn1ct_name:all(rb)).
+
+add_removed_bytes([H,T1|T]) ->
+ emit({{var,H},"+"}),
+ add_removed_bytes([T1|T]);
+add_removed_bytes([H|T]) ->
+ emit({{var,H}}),
+ add_removed_bytes(T);
+add_removed_bytes([]) ->
+ true.
+
+mkfuncname(WhatKind,DecOrEnc) ->
+ case WhatKind of
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ CurrMod = get(currmod),
+ case CurrMod of
+ Mod ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ _ ->
+% io:format("CurrMod: ~p, Mod: ~p~n",[CurrMod,Mod]),
+ lists:concat(["'",Mod,"':'",DecOrEnc,"_",EType,"'"])
+ end;
+ #'typereference'{val=EType} ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ 'ASN1_OPEN_TYPE' ->
+ lists:concat(["'",DecOrEnc,"_",WhatKind,"'"])
+
+ end.
+
+optionals(L) -> optionals(L,[],1).
+
+optionals([{'EXTENSIONMARK',_,_}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos); % optionals in extension are currently not handled
+optionals([#'ComponentType'{name=Name,prop='OPTIONAL'}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{name=Name,prop={'DEFAULT',_}}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos+1);
+optionals([],Acc,_) ->
+ lists:reverse(Acc).
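+%% A worked example with hypothetical components: for a SEQUENCE whose
+%% components are a (OPTIONAL), b (mandatory) and c (DEFAULT 0),
+%%   optionals([#'ComponentType'{name=a,prop='OPTIONAL'},
+%%              #'ComponentType'{name=b,prop=mandatory},
+%%              #'ComponentType'{name=c,prop={'DEFAULT',0}}])
+%% returns [{a,1},{c,3}], i.e. the OPTIONAL/DEFAULT component names paired
+%% with their positions in the component list.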
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+
+get_class_fields(#classdef{typespec=ObjClass}) ->
+ ObjClass#objectclass.fields;
+get_class_fields(#objectclass{fields=Fields}) ->
+ Fields;
+get_class_fields(_) ->
+ [].
+
+get_object_field(Name,ObjectFields) ->
+ case lists:keysearch(Name,1,ObjectFields) of
+ {value,Field} -> Field;
+ false -> false
+ end.
+
+%% encode_tag_val(Class (?UNIVERSAL, ?APPLICATION etc), Form (?PRIMITIVE etc), TagInteger) ->
+%%     binary()
+encode_tag_val(Class, Form, TagNo) when (TagNo =< 30) ->
+ <<(Class bsr 6):2,(Form bsr 5):1,TagNo:5>>;
+
+encode_tag_val(Class, Form, TagNo) ->
+ {Octets,_Len} = mk_object_val(TagNo),
+ BinOct = list_to_binary(Octets),
+ <<(Class bsr 6):2, (Form bsr 5):1, 31:5,BinOct/binary>>.
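+%% Worked example: a tag number =< 30 fits in a single octet, e.g.
+%%   encode_tag_val(?CONTEXT, ?PRIMITIVE, 3) = <<2:2,0:1,3:5>> = <<16#83>>
+%% For larger tag numbers the first octet carries 31 in its low five bits
+%% and the number itself follows, encoded by mk_object_val/1 below.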
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets, where bit 8 is always set to one except
+%% in the last octet, where it is 0
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
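+%% Worked example: mk_object_val(128) = {[16#81,16#00],2}, i.e. 128 is split
+%% into the 7-bit groups 1 and 0 and every octet except the last one has its
+%% top bit set.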
+
+add_func(F={_Func,_Arity}) ->
+ ets:insert(asn1_functab,{F}).
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per.erl
new file mode 100644
index 0000000000..8cd8d34918
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per.erl
@@ -0,0 +1,1190 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_per.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_gen_per).
+
+%% Generate an Erlang module which handles (PER) encode and decode for
+%% all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-export([pgen/4,gen_dec_prim/3,gen_encode_prim/4]).
+-export([gen_obj_code/3,gen_objectset_code/2]).
+-export([gen_decode/2, gen_decode/3]).
+-export([gen_encode/2, gen_encode/3]).
+-export([is_already_generated/2,more_genfields/1,get_class_fields/1,
+ get_object_field/2]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList}
+%% TypeList = ValueList = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%% Generate ENCODING ******************************
+%%****************************************x
+
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+%% case Type#typedef.typespec of
+%% Def when record(Def,type) ->
+%% gen_encode_user(Erules,Type);
+%% Def when tuple(Def),(element(1,Def) == 'Object') ->
+%% gen_encode_object(Erules,Type);
+%% Other ->
+%% exit({error,{asn1,{unknown,Other}}})
+%% end.
+
+gen_encode(Erules,Typename,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTypename = [Cname|Typename],
+ gen_encode(Erules,NewTypename,Type);
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+%% lists:concat([", ObjFun",Name]);
+ ", ObjFun";
+ false ->
+ ""
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ case InnerType of
+ 'SET' ->
+ true;
+ 'SEQUENCE' ->
+ true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}",ObjFun,") ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",ObjFun,");",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val",ObjFun,
+ ") ->",nl}),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case InnerType of
+ 'SET' -> true;
+ 'SEQUENCE' -> true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),"'({'",asn1ct_gen:list2name(Typename),"',Val}) ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val) ->",nl}),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_encode_prim(Erules,Def,"false"),
+ emit({".",nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_encode_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"false"),
+ emit({".",nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'enc_",Etype,"'(Val).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'enc_",Etype,"'(Val).",nl,nl});
+ #typereference{val=Ename} ->
+ emit({"'enc_",Ename,"'(Val).",nl,nl});
+ {notype,_} ->
+ emit({"'enc_",InnerType,"'(Val).",nl,nl})
+ end.
+
+
+gen_encode_prim(Erules,D,DoTag) ->
+ Value = case asn1ct_name:active(val) of
+ true ->
+ asn1ct_gen:mk_var(asn1ct_name:curr(val));
+ false ->
+ "Val"
+ end,
+ gen_encode_prim(Erules,D,DoTag,Value).
+
+gen_encode_prim(_Erules,D,_DoTag,Value) when record(D,type) ->
+ Constraint = D#type.constraint,
+ case D#type.def of
+ 'INTEGER' ->
+	    emit({"?RT_PER:encode_integer(", % wrong
+ {asis,Constraint},",",Value,")"});
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_PER:encode_integer(",
+ {asis,Constraint},",",Value,",",
+ {asis,NamedNumberList},")"});
+ {'ENUMERATED',{Nlist1,Nlist2}} ->
+ NewList = lists:concat([[{0,X}||{X,_} <- Nlist1],['EXT_MARK'],[{1,X}||{X,_} <- Nlist2]]),
+ NewC = [{'ValueRange',{0,length(Nlist1)-1}}],
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NewC, NewList++[{asn1_enum,length(Nlist1)-1}], 0);
+ {'ENUMERATED',NamedNumberList} ->
+ NewList = [X||{X,_} <- NamedNumberList],
+ NewC = [{'ValueRange',{0,length(NewList)-1}}],
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NewC, NewList, 0);
+ {'BIT STRING',NamedNumberList} ->
+ emit({"?RT_PER:encode_bit_string(",
+ {asis,Constraint},",",Value,",",
+ {asis,NamedNumberList},")"});
+ 'NULL' ->
+ emit({"?RT_PER:encode_null(",Value,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:encode_object_identifier(",Value,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:encode_ObjectDescriptor(",{asis,Constraint},
+ ",",Value,")"});
+ 'BOOLEAN' ->
+ emit({"?RT_PER:encode_boolean(",Value,")"});
+ 'OCTET STRING' ->
+ emit({"?RT_PER:encode_octet_string(",{asis,Constraint},",",Value,")"});
+ 'NumericString' ->
+ emit({"?RT_PER:encode_NumericString(",{asis,Constraint},",",Value,")"});
+ 'TeletexString' ->
+ emit({"?RT_PER:encode_TeletexString(",{asis,Constraint},",",Value,")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:encode_VideotexString(",{asis,Constraint},",",Value,")"});
+ 'UTCTime' ->
+ emit({"?RT_PER:encode_VisibleString(",{asis,Constraint},",",Value,")"});
+ 'GeneralizedTime' ->
+ emit({"?RT_PER:encode_VisibleString(",{asis,Constraint},",",Value,")"});
+ 'GraphicString' ->
+ emit({"?RT_PER:encode_GraphicString(",{asis,Constraint},",",Value,")"});
+ 'VisibleString' ->
+ emit({"?RT_PER:encode_VisibleString(",{asis,Constraint},",",Value,")"});
+ 'GeneralString' ->
+ emit({"?RT_PER:encode_GeneralString(",{asis,Constraint},",",Value,")"});
+ 'PrintableString' ->
+ emit({"?RT_PER:encode_PrintableString(",{asis,Constraint},",",Value,")"});
+ 'IA5String' ->
+ emit({"?RT_PER:encode_IA5String(",{asis,Constraint},",",Value,")"});
+ 'BMPString' ->
+ emit({"?RT_PER:encode_BMPString(",{asis,Constraint},",",Value,")"});
+ 'UniversalString' ->
+ emit({"?RT_PER:encode_UniversalString(",{asis,Constraint},",",Value,")"});
+ 'ANY' ->
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ Value, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ NewValue = case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ _ -> Value
+ end,
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ NewValue, ")"]);
+ XX ->
+ exit({asn1_error,nyi,XX})
+ end.
+
+emit_enc_enumerated_cases(C, [H], Count) ->
+ emit_enc_enumerated_case(C, H, Count),
+ emit([";",nl,"EnumVal -> exit({error,{asn1, {enumerated_not_in_range, EnumVal}}})"]),
+ emit([nl,"end"]);
+emit_enc_enumerated_cases(C, ['EXT_MARK'|T], _Count) ->
+ emit_enc_enumerated_cases(C, T, 0);
+emit_enc_enumerated_cases(C, [H1,H2|T], Count) ->
+ emit_enc_enumerated_case(C, H1, Count),
+ emit([";",nl]),
+ emit_enc_enumerated_cases(C, [H2|T], Count+1).
+
+
+
+emit_enc_enumerated_case(_C, {asn1_enum,High}, _) ->
+ emit([
+ "{asn1_enum,EnumV} when integer(EnumV), EnumV > ",High," -> ",
+ "[{bit,1},?RT_PER:encode_small_number(EnumV)]"]);
+emit_enc_enumerated_case(_C, 'EXT_MARK', _Count) ->
+ true;
+emit_enc_enumerated_case(_C, {1,EnumName}, Count) ->
+ emit(["'",EnumName,"' -> [{bit,1},?RT_PER:encode_small_number(",Count,")]"]);
+emit_enc_enumerated_case(C, {0,EnumName}, Count) ->
+ emit(["'",EnumName,"' -> [{bit,0},?RT_PER:encode_integer(",{asis,C},", ",Count,")]"]);
+emit_enc_enumerated_case(C, EnumName, Count) ->
+ emit(["'",EnumName,"' -> ?RT_PER:encode_integer(",{asis,C},", ",Count,")"]).
+
+
+%% Object code generating for encoding and decoding
+%% ------------------------------------------------
+
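+%% A hedged sketch of the clauses gen_obj_code/3 emits for one object
+%% (the object name 'obj1', the field names and 'SomeType' are assumptions,
+%% not taken from any real specification):
+%%
+%%   'enc_obj1'('FieldA', Val, _RestPrimFieldName) ->
+%%       'enc_SomeType'(Val);
+%%   'enc_obj1'('FieldB', _, _RestPrimFieldName) ->
+%%       [].
+%%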
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=Mod,type=ClassName} =
+ Def#'Object'.classname,
+ Class = asn1_db:dbget(Mod,ClassName),
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+ gen_encode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+ gen_decode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed),
+ emit(nl);
+gen_obj_code(_,_,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(V) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",V,",_RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, _RestPrimFieldName) ->",nl]),
+ MaybeConstr =
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit(" []"),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+ %% A specified field overwrites any 'DEFAULT' or
+ %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Val,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_encode_objectfields(ClassName,[_C|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, Dummy) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_encode_prim(per,Def,"false","Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val)"}),
+% [{['enc_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,"'(Val)"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_encode_objectfields(C,O,[H|T],Acc) ->
+% gen_encode_objectfields(C,O,T,Acc);
+% gen_encode_objectfields(_,_,[],Acc) ->
+% Acc.
+
+% gen_encode_constr_type(Erules,[{Name,Def}|Rest]) ->
+% emit({Name,"(Val) ->",nl}),
+% InnerType = asn1ct_gen:get_inner(Def#type.def),
+% asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+% gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ Name = lists:concat(["enc_",TypeDef#typedef.name]),
+ emit({Name,"(Val) ->",nl}),
+ Def = TypeDef#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+ gen_encode_constr_type(Erules,Rest)
+ end;
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_encode_prim(per,Def,"false",
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val)"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val)"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Val)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(per,Type,"false","Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val)",nl]),
+ []
+ end.
+
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Bytes) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},",",Bytes,
+ ",_,_RestPrimFieldName) ->",nl])
+ end,
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes"),
+ gen_decode_default_call(ClassName,Name,"Bytes",DefaultType);
+ {{Name,TypeSpec},_} ->
+ %% A specified field overwrites any 'DEFAULT' or
+ %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes"),
+ gen_decode_field_call(ObjName,Name,"Bytes",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,_,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,_,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, telltype, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, telltype, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_C|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+
+% gen_decode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, RestPrimFieldName) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_dec_prim(per,Def,"Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'dec_",ObjName,'_',FieldName,
+% "'(Val, Telltype)"}),
+% [{['dec_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'dec_",TypeName,
+% "'(Val, Telltype)"}),
+% [];
+% TypeName ->
+% emit({" 'dec_",TypeName,"'(Val, Telltype)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+% "'(H, Val, Telltype, T)"});
+% TypeName ->
+% emit({indent(3),"'dec_",TypeName,
+% "'(H, Val, Telltype, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} ->
+% []
+% end,
+% gen_decode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_decode_objectfields(C,O,[H|T],CAcc) ->
+% gen_decode_objectfields(C,O,T,CAcc);
+% gen_decode_objectfields(_,_,[],CAcc) ->
+% CAcc.
+
+
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_dec_prim(per,Def,Bytes),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",telltype)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,", telltype)"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,", telltype)"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,", telltype)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(per,Type,Bytes),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ []
+ end.
+
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+ emit({Name,"(Bytes,_) ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+% more_genfields(Fields,[]) ->
+% false;
+% more_genfields(Fields,[{FieldName,_}|T]) ->
+% case is_typefield(Fields,FieldName) of
+% true -> true;
+% {false,objectfield} -> true;
+% {false,_} -> more_genfields(Fields,T)
+% end.
+
+more_genfields([]) ->
+ false;
+more_genfields([Field|Fields]) ->
+ case element(1,Field) of
+ typefield ->
+ true;
+ objectfield ->
+ true;
+ _ ->
+ more_genfields(Fields)
+ end.
+
+% is_typefield(Fields,FieldName) ->
+% case lists:keysearch(FieldName,2,Fields) of
+% {value,Field} ->
+% case element(1,Field) of
+% typefield ->
+% true;
+% Other ->
+% {false,Other}
+% end;
+% _ ->
+% false
+% end.
+%% Object Set code generating for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+%% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = (ClassDef#classdef.typespec)#objectclass.fields,
+ InternalFuncs=
+ gen_objset_enc(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1,[]),
+ gen_objset_dec(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+%% gen_objset_enc iterates over the objects of the object set
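+%% A hedged illustration of the lookup function this generates (the object
+%% set name 'MySet', the unique field 'id' and the object names are
+%% assumptions): for a set keyed on 'id' with two named objects the emitted
+%% clauses look roughly like
+%%
+%%   'getenc_MySet'(id, 1) -> fun 'enc_object1'/3;
+%%   'getenc_MySet'(id, 2) -> fun 'enc_object2'/3.
+%%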
+gen_objset_enc(_,{unique,undefined},_,_,_,_,_) ->
+ %% There is no unique field in the class of this object set
+ %% don't bother about the constraint
+ [];
+gen_objset_enc(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],0}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc ++ Acc);
+gen_objset_enc(ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,_}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc++Acc;
+gen_objset_enc(ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Val, _) ->",nl}),
+ emit({indent(6),"[{octets,Val}]",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% gen_inlined_enc_funs iterates, for each object, over all fields of the
+%% class; for each typefield it checks whether the object has that field
+%% and emits the proper code.
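+%% A hedged sketch of the shape of the emitted inline encoder for an
+%% unnamed object (the field name 'FieldA' is an assumption; the inner
+%% call is whatever emit_inner_of_fun/2 selects):
+%%
+%%   fun(Type, Val, _) ->
+%%       case Type of
+%%           'FieldA' ->
+%%               'enc_...'(Val)
+%%       end
+%%   end
+%%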
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],ObjSetName,NthObj) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N} = emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_H|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+emit_inner_of_fun(TDef=#typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(per,Type,dotag,"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val)"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val)"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name},_) ->
+ emit({indent(12),"'enc_",Name,"'(Val)"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_encode_prim(erules,Type,dotag,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val)"})
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,{unique,undefined},_,_,_,_) ->
+ %% There is no unique field in the class of this object set
+ %% don't bother about the constraint
+ ok;
+gen_objset_dec(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],ClName,
+ ClFields,NthObj)->
+
+ emit({"'getdec_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"}),
+ NthObj
+ end,
+ emit({";",nl}),
+ gen_objset_dec(ObjSName,UniqueName,[T|Rest],ClName,ClFields,NewNthObj);
+gen_objset_dec(ObjSetName,UniqueName,[{ObjName,Val,Fields}],_ClName,
+ ClFields,NthObj) ->
+
+ emit({"'getdec_",ObjSetName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSetName,NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"})
+ end,
+ emit({".",nl,nl}),
+ ok;
+gen_objset_dec(ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,_ClFields,
+ _NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(Attr1, Bytes, _,_) ->",nl}),
+%% emit({indent(6),"?RT_PER:decode_open_type(Bytes,[])",nl}),
+ emit({indent(6),"{Bytes,Attr1}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ ok;
+gen_objset_dec(_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Fields,[_|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,[],_,NthObj) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ NthObj.
+
+emit_inner_of_decfun(#typedef{name={ExtName,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(per,Type,"Val"),
+ 0;
+ {constructed,bif} ->
+ emit({indent(12),"'dec_",
+ asn1ct_gen:list2name(InternalDefFunName),"'(Val)"}),
+ 1;
+ _ ->
+ emit({indent(12),"'",ExtName,"':'dec_",Name,"'(Val, telltype)"}),
+ 0
+ end;
+emit_inner_of_decfun(#typedef{name=Name},_) ->
+ emit({indent(12),"'dec_",Name,"'(Val, telltype)"}),
+ 0;
+emit_inner_of_decfun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_dec_prim(erules,Type,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+ T,"'(Val)"})
+ end,
+ 0.
+
+
+gen_internal_funcs(_,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit([nl,nl,"'dec_",TypeDef#typedef.name,"'(Bytes) ->",nl]),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+
+%% DECODING *****************************
+%%***************************************
+
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ D = Type,
+ emit({nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes,_) ->",nl}),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,D).
+
+gen_decode(Erules,Tname,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTname = [Cname|Tname],
+ gen_decode(Erules,NewTname,Type);
+
+gen_decode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({nl,"'dec_",asn1ct_gen:list2name(Typename),
+ "'(Bytes,_",ObjFun,") ->",nl}),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+dbdec(Type) when list(Type)->
+ demit({"io:format(\"decoding: ",asn1ct_gen:list2name(Type),"~w~n\",[Bytes]),",nl});
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_dec_prim(Erules,Def,"Bytes"),
+ emit({".",nl,nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_dec_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"Bytes"),
+ emit({".",nl,nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(Bytes,telltype)"}),
+ emit({".",nl,nl});
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ Other ->
+ exit({error,{asn1,{unknown,Other}}})
+ end.
+
+
+gen_dec_prim(_Erules,Att,BytesVar) ->
+ Typename = Att#type.def,
+ Constraint = Att#type.constraint,
+ case Typename of
+ 'INTEGER' ->
+ emit({"?RT_PER:decode_integer(",BytesVar,",",
+ {asis,Constraint},")"});
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_PER:decode_integer(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},")"});
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_PER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},")"});
+ _ ->
+ emit({"?RT_PER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},")"})
+ end;
+ 'NULL' ->
+ emit({"?RT_PER:decode_null(",
+ BytesVar,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:decode_object_identifier(",
+ BytesVar,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:decode_ObjectDescriptor(",
+ BytesVar,")"});
+ {'ENUMERATED',{NamedNumberList1,NamedNumberList2}} ->
+ NewTup = {list_to_tuple([X||{X,_} <- NamedNumberList1]),
+ list_to_tuple([X||{X,_} <- NamedNumberList2])},
+ NewC = [{'ValueRange',{0,size(element(1,NewTup))-1}}],
+ emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+ {asis,NewC},",",
+ {asis,NewTup},")"});
+ {'ENUMERATED',NamedNumberList} ->
+ NewTup = list_to_tuple([X||{X,_} <- NamedNumberList]),
+ NewC = [{'ValueRange',{0,size(NewTup)-1}}],
+ emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+ {asis,NewC},",",
+ {asis,NewTup},")"});
+ 'BOOLEAN'->
+ emit({"?RT_PER:decode_boolean(",BytesVar,")"});
+ 'OCTET STRING' ->
+ emit({"?RT_PER:decode_octet_string(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'NumericString' ->
+ emit({"?RT_PER:decode_NumericString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'TeletexString' ->
+ emit({"?RT_PER:decode_TeletexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:decode_VideotexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'UTCTime' ->
+ emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'GeneralizedTime' ->
+ emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'GraphicString' ->
+ emit({"?RT_PER:decode_GraphicString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VisibleString' ->
+ emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'GeneralString' ->
+ emit({"?RT_PER:decode_GeneralString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'PrintableString' ->
+ emit({"?RT_PER:decode_PrintableString(",BytesVar,",",{asis,Constraint},")"});
+ 'IA5String' ->
+ emit({"?RT_PER:decode_IA5String(",BytesVar,",",{asis,Constraint},")"});
+ 'BMPString' ->
+ emit({"?RT_PER:decode_BMPString(",BytesVar,",",{asis,Constraint},")"});
+ 'UniversalString' ->
+ emit({"?RT_PER:decode_UniversalString(",BytesVar,",",{asis,Constraint},")"});
+ 'ANY' ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",",
+ {asis,Constraint}, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ _ ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",[])"])
+ end;
+ Other ->
+ exit({'cant decode' ,Other})
+ end.
+
+
+is_already_generated(Operation,Name) ->
+ case get(class_default_type) of
+ undefined ->
+ put(class_default_type,[{Operation,Name}]),
+ false;
+ GeneratedList ->
+ case lists:member({Operation,Name},GeneratedList) of
+ true ->
+ true;
+ false ->
+ put(class_default_type,[{Operation,Name}|GeneratedList]),
+ false
+ end
+ end.
+
+get_class_fields(#classdef{typespec=ObjClass}) ->
+ ObjClass#objectclass.fields;
+get_class_fields(#objectclass{fields=Fields}) ->
+ Fields;
+get_class_fields(_) ->
+ [].
+
+
+get_object_field(Name,ObjectFields) ->
+ case lists:keysearch(Name,1,ObjectFields) of
+ {value,Field} -> Field;
+ false -> false
+ end.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl
new file mode 100644
index 0000000000..70a017ac6a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl
@@ -0,0 +1,1811 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_per_rt2ct.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_gen_per_rt2ct).
+
+%% Generates an Erlang module which handles (PER) encoding and decoding
+%% for all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-export([pgen/4,gen_dec_prim/3,gen_encode_prim/4]).
+-export([gen_obj_code/3,gen_objectset_code/2]).
+-export([gen_decode/2, gen_decode/3]).
+-export([gen_encode/2, gen_encode/3]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+-import(asn1ct_gen_per, [is_already_generated/2,more_genfields/1,
+ get_class_fields/1,get_object_field/2]).
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList}
+%% TypeList = ValueList = [atom()]
+
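+%% A hedged usage sketch (file name, module name and type name are
+%% assumptions, with TypeOrVal shaped as described above):
+%%   pgen("MyProto.erl", per, 'MyProto', {['MyType'],[]})
+%% hands the work over to asn1ct_gen:pgen_module/5, which writes the
+%% generated 'enc_MyType'/'dec_MyType' functions to the output file.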
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%% Generate ENCODING ******************************
+%%****************************************
+
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+
+gen_encode(Erules,Typename,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTypename = [Cname|Typename],
+ gen_encode(Erules,NewTypename,Type);
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+ ", ObjFun";
+ false ->
+ ""
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ case InnerType of
+ 'SET' ->
+ true;
+ 'SEQUENCE' ->
+ true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}",ObjFun,") ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",ObjFun,");",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val",ObjFun,
+ ") ->",nl}),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case InnerType of
+ 'SET' -> true;
+ 'SEQUENCE' -> true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),"'({'",asn1ct_gen:list2name(Typename),"',Val}) ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val) ->",nl}),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_encode_prim(Erules,Def,"false"),
+ emit({".",nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_encode_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"false"),
+ emit({".",nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'enc_",Etype,"'(Val).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'enc_",Etype,"'(Val).",nl,nl});
+ #typereference{val=Ename} ->
+ emit({"'enc_",Ename,"'(Val).",nl,nl});
+ {notype,_} ->
+ emit({"'enc_",InnerType,"'(Val).",nl,nl})
+ end.
+
+
+gen_encode_prim(Erules,D,DoTag) ->
+ Value = case asn1ct_name:active(val) of
+ true ->
+ asn1ct_gen:mk_var(asn1ct_name:curr(val));
+ false ->
+ "Val"
+ end,
+ gen_encode_prim(Erules,D,DoTag,Value).
+
+
+
+
+
+gen_encode_prim(_Erules,D,_DoTag,Value) when record(D,type) ->
+ Constraint = D#type.constraint,
+ case D#type.def of
+ 'INTEGER' ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ emit([" %%INTEGER with effective constraint: ",
+ {asis,EffectiveConstr},nl]),
+ emit_enc_integer(EffectiveConstr,Value);
+ {'INTEGER',NamedNumberList} ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ %% maybe an emit_enc_NNL_integer
+ emit([" %%INTEGER with effective constraint: ",
+ {asis,EffectiveConstr},nl]),
+ emit_enc_integer_NNL(EffectiveConstr,Value,NamedNumberList);
+ {'ENUMERATED',{Nlist1,Nlist2}} ->
+ NewList = lists:concat([[{0,X}||{X,_} <- Nlist1],['EXT_MARK'],[{1,X}||{X,_} <- Nlist2]]),
+ NewC = [{'ValueRange',{0,length(Nlist1)-1}}],
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NewC, NewList++[{asn1_enum,length(Nlist1)-1}], 0);
+ {'ENUMERATED',NamedNumberList} ->
+ NewList = [X||{X,_} <- NamedNumberList],
+ NewC = effective_constraint(integer,
+ [{'ValueRange',
+ {0,length(NewList)-1}}]),
+ NewVal = enc_enum_cases(Value,NewList),
+ emit_enc_integer(NewC,NewVal);
+ {'BIT STRING',NamedNumberList} ->
+ EffectiveC = effective_constraint(bitstring,Constraint),
+ case EffectiveC of
+ 0 -> emit({"[]"});
+ _ ->
+ emit({"?RT_PER:encode_bit_string(",
+ {asis,EffectiveC},",",Value,",",
+ {asis,NamedNumberList},")"})
+ end;
+ 'NULL' ->
+ emit({"?RT_PER:encode_null(",Value,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:encode_object_identifier(",Value,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:encode_ObjectDescriptor(",{asis,Constraint},
+ ",",Value,")"});
+ 'BOOLEAN' ->
+% emit({"?RT_PER:encode_boolean(",Value,")"});
+ emit({"case ",Value," of",nl,
+% " true -> {bits,1,1};",nl,
+ " true -> [1];",nl,
+% " false -> {bits,1,0};",nl,
+ " false -> [0];",nl,
+ " _ -> exit({error,{asn1,{encode_boolean,",Value,"}}})",nl,
+ "end"});
+ 'OCTET STRING' ->
+ emit_enc_octet_string(Constraint,Value);
+
+ 'NumericString' ->
+ emit_enc_known_multiplier_string('NumericString',Constraint,Value);
+ 'TeletexString' ->
+ emit({"?RT_PER:encode_TeletexString(",{asis,Constraint},",",Value,")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:encode_VideotexString(",{asis,Constraint},",",Value,")"});
+ 'UTCTime' ->
+ emit_enc_known_multiplier_string('VisibleString',Constraint,Value);
+ 'GeneralizedTime' ->
+ emit_enc_known_multiplier_string('VisibleString',Constraint,Value);
+ 'GraphicString' ->
+ emit({"?RT_PER:encode_GraphicString(",{asis,Constraint},",",Value,")"});
+ 'VisibleString' ->
+ emit_enc_known_multiplier_string('VisibleString',Constraint,Value);
+ 'GeneralString' ->
+ emit({"?RT_PER:encode_GeneralString(",{asis,Constraint},",",Value,")"});
+ 'PrintableString' ->
+ emit_enc_known_multiplier_string('PrintableString',Constraint,Value);
+ 'IA5String' ->
+ emit_enc_known_multiplier_string('IA5String',Constraint,Value);
+ 'BMPString' ->
+ emit_enc_known_multiplier_string('BMPString',Constraint,Value);
+ 'UniversalString' ->
+ emit_enc_known_multiplier_string('UniversalString',Constraint,Value);
+ 'ANY' ->
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ Value, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ NewValue = case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ _ -> Value
+ end,
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ NewValue, ")"]);
+ XX ->
+ exit({asn1_error,nyi,XX})
+ end.
+
+emit_enc_known_multiplier_string(StringType,C,Value) ->
+ SizeC =
+ case get_constraint(C,'SizeConstraint') of
+ L when list(L) -> {lists:min(L),lists:max(L)};
+ L -> L
+ end,
+ PAlphabC = get_constraint(C,'PermittedAlphabet'),
+ case {StringType,PAlphabC} of
+ {'UniversalString',{_,_}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with "
+ "PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with "
+ "PermittedAlphabet constraint"}}});
+ _ -> ok
+ end,
+ NumBits = get_NumBits(C,StringType),
+ CharOutTab = get_CharOutTab(C,StringType),
+ %% NumBits and CharOutTab for chars_encode
+ emit_enc_k_m_string(StringType,SizeC,NumBits,CharOutTab,Value).
+
+emit_enc_k_m_string(_StringType,0,_NumBits,_CharOutTab,_Value) ->
+ emit({"[]"});
+emit_enc_k_m_string(StringType,SizeC,NumBits,CharOutTab,Value) ->
+ emit({"?RT_PER:encode_known_multiplier_string(",{asis,StringType},",",
+ {asis,SizeC},",",NumBits,",",{asis,CharOutTab},",",Value,")"}).
+
+emit_dec_known_multiplier_string(StringType,C,BytesVar) ->
+ SizeC = get_constraint(C,'SizeConstraint'),
+ PAlphabC = get_constraint(C,'PermittedAlphabet'),
+ case {StringType,PAlphabC} of
+ {'BMPString',{_,_}} ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet "
+ "constraint"}}});
+ _ ->
+ ok
+ end,
+ NumBits = get_NumBits(C,StringType),
+ CharInTab = get_CharInTab(C,StringType),
+ case SizeC of
+ 0 ->
+ emit({"{[],",BytesVar,"}"});
+ _ ->
+ emit({"?RT_PER:decode_known_multiplier_string(",
+ {asis,StringType},",",{asis,SizeC},",",NumBits,
+ ",",{asis,CharInTab},",",BytesVar,")"})
+ end.
+
+
+%% copied from run time module
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z]
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B =< 4 -> 4;
+ B when B =< 8 -> 8;
+ B when B =< 16 -> 16;
+ B when B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
+
+%% copied from run time module
+
+emit_enc_octet_string(Constraint,Value) ->
+ case get_constraint(Constraint,'SizeConstraint') of
+ 0 ->
+ emit({" []"});
+ 1 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+ emit({" [",{curr,tmpval},"] = ",Value,",",nl}),
+% emit({" {bits,8,",{curr,tmpval},"}",nl}),
+ emit({" [10,8,",{curr,tmpval},"]",nl}),
+ emit(" end");
+ 2 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+ emit({" [",{curr,tmpval},",",{next,tmpval},"] = ",
+ Value,",",nl}),
+% emit({" [{bits,8,",{curr,tmpval},"},{bits,8,",
+% {next,tmpval},"}]",nl}),
+ emit({" [[10,8,",{curr,tmpval},"],[10,8,",
+ {next,tmpval},"]]",nl}),
+ emit(" end"),
+ asn1ct_name:new(tmpval);
+ Sv when integer(Sv),Sv =< 256 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+% emit({" case length(",Value,") == ",Sv," of",nl}),
+ emit({" case length(",Value,") of",nl}),
+ emit({" ",{curr,tmpval}," when ",{curr,tmpval}," == ",Sv," -> [2,20,",{curr,tmpval},",",Value,"];",nl}),
+ emit({" _ -> exit({error,{value_out_of_bounds,",Value,"}})",
+ nl," end",nl}),
+ emit(" end");
+ Sv when integer(Sv),Sv =< 65535 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+% emit({" case length(",Value,") == ",Sv," of",nl}),
+ emit({" case length(",Value,") of",nl}),
+% emit({" true -> [align,{octets,",Value,"}];",nl}),
+ emit({" ",{curr,tmpval}," when ",{curr,tmpval}," == ",Sv," -> [2,21,",{curr,tmpval},",",Value,"];",nl}),
+ emit({" _ -> exit({error,{value_out_of_bounds,",Value,"}})",
+ nl," end",nl}),
+ emit(" end");
+ C ->
+ emit({" ?RT_PER:encode_octet_string(",{asis,C},",false,",Value,")",nl})
+ end.
+
+emit_dec_octet_string(Constraint,BytesVar) ->
+ case get_constraint(Constraint,'SizeConstraint') of
+ 0 ->
+ emit({" {[],",BytesVar,"}",nl});
+ {_,0} ->
+ emit({" {[],",BytesVar,"}",nl});
+ C ->
+ emit({" ?RT_PER:decode_octet_string(",BytesVar,",",
+ {asis,C},",false)",nl})
+ end.
+
+emit_enc_integer_case(Value) ->
+ case get(component_type) of
+ {true,#'ComponentType'{prop=Prop}} ->
+ emit({" begin",nl}),
+ case Prop of
+ Opt when Opt=='OPTIONAL';
+ tuple(Opt),element(1,Opt)=='DEFAULT' ->
+ emit({" case ",Value," of",nl}),
+ ok;
+ _ ->
+ emit({" ",{curr,tmpval},"=",Value,",",nl}),
+ emit({" case ",{curr,tmpval}," of",nl}),
+ asn1ct_name:new(tmpval)
+ end;
+% asn1ct_name:new(tmpval);
+ _ ->
+ emit({" case ",Value," of ",nl})
+ end.
+emit_enc_integer_end_case() ->
+ case get(component_type) of
+ {true,_} ->
+ emit({nl," end"}); % end of begin ... end
+ _ -> ok
+ end.
+
+
+emit_enc_integer_NNL(C,Value,NNL) ->
+ EncVal = enc_integer_NNL_cases(Value,NNL),
+ emit_enc_integer(C,EncVal).
+
+enc_integer_NNL_cases(Value,NNL) ->
+ asn1ct_name:new(tmpval),
+ TmpVal = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ Cases=enc_integer_NNL_cases1(NNL),
+ lists:flatten(io_lib:format("(case ~s of "++Cases++
+ "~s when atom(~s)->exit({error,{asn1,{namednumber,~s}}});_->~s end)",[Value,TmpVal,TmpVal,TmpVal,Value])).
+
+enc_integer_NNL_cases1([{NNo,No}|Rest]) ->
+ io_lib:format("~w->~w;",[NNo,No])++enc_integer_NNL_cases1(Rest);
+enc_integer_NNL_cases1([]) ->
+ "".
+
+emit_enc_integer([{'SingleValue',Int}],Value) ->
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),% emit([" case ",Value," of",nl]),
+ emit([" ",Int," -> [];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+emit_enc_integer([{_,{Lb,Ub},_Range,{bits,NoBs}}],Value) -> % Range =< 255
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),
+ emit([" ",{curr,tmpval}," when ",{curr,tmpval},"=<",Ub,",",
+ {curr,tmpval},">=",Lb," ->",nl]),
+ emit([" [10,",NoBs,",",{curr,tmpval},"-",Lb,"];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+emit_enc_integer([{_,{Lb,Ub},Range,_}],Value) when Range =< 256 ->
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),
+ emit([" ",{curr,tmpval}," when ",{curr,tmpval},"=<",Ub,",",
+ {curr,tmpval},">=",Lb," ->",nl]),
+ emit([" [20,1,",{curr,tmpval},"-",Lb,"];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+emit_enc_integer([{_,{Lb,Ub},Range,_}],Value) when Range =< 65536 ->
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),
+ emit([" ",{curr,tmpval}," when ",{curr,tmpval},"=<",Ub,",",
+ {curr,tmpval},">=",Lb," ->",nl]),
+ emit([" [20,2,<<(",{curr,tmpval},"-",Lb,"):16>>];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+
+emit_enc_integer(C,Value) ->
+ emit({" ?RT_PER:encode_integer(",{asis,C},",",Value,")"}).
+
+
+
+
+enc_enum_cases(Value,NewList) ->
+ asn1ct_name:new(tmpval),
+ TmpVal = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ Cases=enc_enum_cases1(NewList),
+ lists:flatten(io_lib:format("(case ~s of "++Cases++
+ "~s ->exit({error,"
+ "{asn1,{enumerated,~s}}})"
+ " end)",
+ [Value,TmpVal,TmpVal])).
+enc_enum_cases1(NNL) ->
+ enc_enum_cases1(NNL,0).
+enc_enum_cases1([H|T],Index) ->
+ io_lib:format("~w->~w;",[H,Index])++enc_enum_cases1(T,Index+1);
+enc_enum_cases1([],_) ->
+ "".
+
+
+emit_enc_enumerated_cases(C, [H], Count) ->
+ emit_enc_enumerated_case(C, H, Count),
+ emit([";",nl,"EnumVal -> exit({error,{asn1, {enumerated_not_in_range, EnumVal}}})"]),
+ emit([nl,"end"]);
+emit_enc_enumerated_cases(C, ['EXT_MARK'|T], _Count) ->
+ emit_enc_enumerated_cases(C, T, 0);
+emit_enc_enumerated_cases(C, [H1,H2|T], Count) ->
+ emit_enc_enumerated_case(C, H1, Count),
+ emit([";",nl]),
+ emit_enc_enumerated_cases(C, [H2|T], Count+1).
+
+
+%% The function clauses that match tuples whose first element is
+%% asn1_enum, 1 or 0, and the clause that matches the atom 'EXT_MARK',
+%% handle ENUMERATED types with an extension mark.
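+%% A hedged sketch of the generated case arms for an extensible ENUMERATED
+%% (the enumeration names 'red'/'blue' and the inlined constraint are
+%% placeholders): a value in the extension root emits
+%%   'red' -> [0,?RT_PER:encode_integer([{'ValueRange',{0,1}}], 0)];
+%% a value after the extension mark emits
+%%   'blue' -> [1,?RT_PER:encode_small_number(0)];
+%% and higher, unknown extension values are caught by the generated
+%% {asn1_enum,EnumV} arm.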
+emit_enc_enumerated_case(_C, {asn1_enum,High}, _) ->
+ %% ENUMERATED with extensionmark
+ %% value higher than the extension base and not
+ %% present in the extension range.
+ emit(["{asn1_enum,EnumV} when integer(EnumV), EnumV > ",High," -> ",
+ "[1,?RT_PER:encode_small_number(EnumV)]"]);
+emit_enc_enumerated_case(_C, 'EXT_MARK', _Count) ->
+ %% ENUMERATED with extensionmark
+ true;
+emit_enc_enumerated_case(_C, {1,EnumName}, Count) ->
+ %% ENUMERATED with extensionmark
+ %% values higher than extension root
+ emit(["'",EnumName,"' -> [1,?RT_PER:encode_small_number(",Count,")]"]);
+emit_enc_enumerated_case(C, {0,EnumName}, Count) ->
+ %% ENUMERATED with extensionmark
+ %% values within extension root
+ emit(["'",EnumName,"' -> [0,?RT_PER:encode_integer(",{asis,C},", ",Count,")]"]);
+
+%% This clause is invoked in case of an ENUMERATED without extension mark
+emit_enc_enumerated_case(_C, EnumName, Count) ->
+ emit(["'",EnumName,"' -> ",Count]).
+
+
+get_constraint([{Key,V}],Key) ->
+ V;
+get_constraint([],_) ->
+ no;
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+get_constraints(L=[{Key,_}],Key) ->
+ L;
+get_constraints([],_) ->
+ [];
+get_constraints(C,Key) ->
+ {value,L} = keysearch_allwithkey(Key,1,C,[]),
+ L.
+
+keysearch_allwithkey(Key,Ix,C,Acc) ->
+ case lists:keysearch(Key,Ix,C) of
+ false ->
+ {value,Acc};
+ {value,T} ->
+ RestC = lists:delete(T,C),
+ keysearch_allwithkey(Key,Ix,RestC,[T|Acc])
+ end.
+
+%% effective_constraint(Type,C)
+%% Type = atom()
+%% C = [C1,...]
+%% C1 = {'SingleValue',SV} | {'ValueRange',VR} | {atom(),term()}
+%% SV = integer() | [integer(),...]
+%% VR = {Lb,Ub}
+%% Lb = 'MIN' | integer()
+%% Ub = 'MAX' | integer()
+%% Returns a single-value constraint if C contains only single value
+%% constraints (and no value range constraints) that constrain to one
+%% value; otherwise returns a value range whose lower bound is the lowest
+%% of all single values and lower bounds in C, and whose upper bound is
+%% the greatest such value.
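+%% A hedged worked example (the constraint list is an assumption):
+%%   effective_constraint(integer, [{'SingleValue',5},{'ValueRange',{0,10}}])
+%% collapses the single value into the surrounding range and, via
+%% pre_encode/2 and no_bits/1 below, yields
+%%   [{'ValueRange',{0,10},11,{bits,4}}]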
+effective_constraint(integer,[C={{_,_},_}|_Rest]) -> % extension
+ [C]; %% [C|effective_constraint(integer,Rest)]; XXX what is possible ???
+effective_constraint(integer,C) ->
+ SVs = get_constraints(C,'SingleValue'),
+ SV = effective_constr('SingleValue',SVs),
+ VRs = get_constraints(C,'ValueRange'),
+ VR = effective_constr('ValueRange',VRs),
+ CRange = greatest_common_range(SV,VR),
+ pre_encode(integer,CRange);
+effective_constraint(bitstring,C) ->
+% Constr=get_constraints(C,'SizeConstraint'),
+% case Constr of
+% [] -> no;
+% [{'SizeConstraint',Val}] -> Val;
+% Other -> Other
+% end;
+ get_constraint(C,'SizeConstraint');
+effective_constraint(Type,C) ->
+ io:format("Effective constraint for ~p, not implemented yet.~n",[Type]),
+ C.
+
+effective_constr(_,[]) ->
+ [];
+effective_constr('SingleValue',List) ->
+ SVList = lists:flatten(lists:map(fun(X)->element(2,X)end,List)),
+ case lists:usort(SVList) of
+ [N] ->
+ [{'SingleValue',N}];
+ L when list(L) ->
+ [{'ValueRange',{hd(L),lists:last(L)}}]
+ end;
+effective_constr('ValueRange',List) ->
+ LBs = lists:map(fun({_,{Lb,_}})-> Lb end,List),
+ UBs = lists:map(fun({_,{_,Ub}})-> Ub end,List),
+ Lb = least_Lb(LBs),
+ [{'ValueRange',{Lb,lists:max(UBs)}}].
+
+greatest_common_range([],VR) ->
+ VR;
+greatest_common_range(SV,[]) ->
+ SV;
+greatest_common_range([{_,Int}],[{_,{'MIN',Ub}}]) when integer(Int),
+ Int > Ub ->
+ [{'ValueRange',{'MIN',Int}}];
+greatest_common_range([{_,Int}],[{_,{Lb,Ub}}]) when integer(Int),
+ Int < Lb ->
+ [{'ValueRange',{Int,Ub}}];
+greatest_common_range([{_,Int}],VR=[{_,{_Lb,_Ub}}]) when integer(Int) ->
+ VR;
+greatest_common_range([{_,L}],[{_,{Lb,Ub}}]) when list(L) ->
+ Min = least_Lb([Lb|L]),
+ Max = greatest_Ub([Ub|L]),
+ [{'ValueRange',{Min,Max}}].
+
+
+least_Lb(L) ->
+ case lists:member('MIN',L) of
+ true -> 'MIN';
+ _ -> lists:min(L)
+ end.
+
+greatest_Ub(L) ->
+ case lists:member('MAX',L) of
+ true -> 'MAX';
+ _ -> lists:max(L)
+ end.
+
+% effective_constraint1('SingleValue',List) ->
+% SVList = lists:map(fun(X)->element(2,X)end,List),
+% sv_effective_constraint(hd(SVList),tl(SVList));
+% effective_constraint1('ValueRange',List) ->
+% VRList = lists:map(fun(X)->element(2,X)end,List),
+% vr_effective_constraint(lists:map(fun(X)->element(1,X)end,VRList),
+% lists:map(fun(X)->element(2,X)end,VRList)).
+
+%% vr_effective_constraint/2
+%% Gets all LowerEndPoints and UpperEndPoints as arguments
+%% Returns {'ValueRange',{Lb,Ub}} where Lb is the highest value of
+%% the LowerEndPoints and Ub is the lowest value of the UpperEndPoints,
+%% i.e. the intersection of all value ranges.
+% vr_effective_constraint(Mins,Maxs) ->
+% Lb=lists:foldl(fun(X,'MIN') when integer(X) -> X;
+% (X,'MIN') -> 'MIN';
+% (X,AccIn) when integer(X),X >= AccIn -> X;
+% (X,AccIn) -> AccIn
+% end,hd(Mins),tl(Mins)),
+% Ub = lists:min(Maxs),
+% {'ValueRange',{Lb,Ub}}.
+
+
+% sv_effective_constraint(SV,[]) ->
+% {'SingleValue',SV};
+% sv_effective_constraint([],_) ->
+% exit({error,{asn1,{illegal_single_value_constraint}}});
+% sv_effective_constraint(SV,[SV|Rest]) ->
+% sv_effective_constraint(SV,Rest);
+% sv_effective_constraint(Int,[SV|Rest]) when integer(Int),list(SV) ->
+% case lists:member(Int,SV) of
+% true ->
+% sv_effective_constraint(Int,Rest);
+% _ ->
+% exit({error,{asn1,{illegal_single_value_constraint}}})
+% end;
+% sv_effective_constraint(SV,[Int|Rest]) when integer(Int),list(SV) ->
+% case lists:member(Int,SV) of
+% true ->
+% sv_effective_constraint(Int,Rest);
+% _ ->
+% exit({error,{asn1,{illegal_single_value_constraint}}})
+% end;
+% sv_effective_constraint(SV1,[SV2|Rest]) when list(SV1),list(SV2) ->
+% sv_effective_constraint(common_set(SV1,SV2),Rest);
+% sv_effective_constraint(_,_) ->
+% exit({error,{asn1,{illegal_single_value_constraint}}}).
+
+%% common_set/2
+%% Two lists as input
+%% Returns the list with all elements that are common for both
+%% input lists
+% common_set(SV1,SV2) ->
+% lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
+
+
+
+pre_encode(integer,[]) ->
+ [];
+pre_encode(integer,C=[{'SingleValue',_}]) ->
+ C;
+pre_encode(integer,C=[{'ValueRange',VR={Lb,Ub}}]) when integer(Lb),integer(Ub)->
+ Range = Ub-Lb+1,
+ if
+ Range =< 255 ->
+ NoBits = no_bits(Range),
+ [{'ValueRange',VR,Range,{bits,NoBits}}];
+ Range =< 256 ->
+ [{'ValueRange',VR,Range,{octets,1}}];
+ Range =< 65536 ->
+ [{'ValueRange',VR,Range,{octets,2}}];
+ true ->
+ C
+ end;
+pre_encode(integer,C) ->
+ C.
+
+no_bits(2) -> 1;
+no_bits(N) when N=<4 -> 2;
+no_bits(N) when N=<8 -> 3;
+no_bits(N) when N=<16 -> 4;
+no_bits(N) when N=<32 -> 5;
+no_bits(N) when N=<64 -> 6;
+no_bits(N) when N=<128 -> 7;
+no_bits(N) when N=<255 -> 8.
+
+%% Object code generating for encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=Mod,type=ClassName} =
+ Def#'Object'.classname,
+ Class = asn1_db:dbget(Mod,ClassName),
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+% gen_encode_objectfields(Class#classdef.typespec,ObjName,Fields,[]),
+ gen_encode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+% gen_decode_objectfields(Class#classdef.typespec,ObjName,Fields,[]),
+ gen_decode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed),
+ emit(nl);
+gen_obj_code(_Erules,_Module,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(V) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",V,",_RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, RestPrimFieldName) ->",nl]),
+ MaybeConstr =
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit(" <<>>"),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+	    %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Val,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_encode_objectfields(ClassName,[_C|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, Dummy) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_encode_prim(per,Def,"false","Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val)"}),
+% [{['enc_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,"'(Val)"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_encode_objectfields(C,O,[_|T],Acc) ->
+% gen_encode_objectfields(C,O,T,Acc);
+% gen_encode_objectfields(_,_,[],Acc) ->
+% Acc.
+
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ Name = lists:concat(["enc_",TypeDef#typedef.name]),
+ emit({Name,"(Val) ->",nl}),
+ Def = TypeDef#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+ gen_encode_constr_type(Erules,Rest)
+ end;
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_encode_prim(per,Def,"false",
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val)"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val)"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Val)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(per,Type,"false","Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val)",nl]),
+ []
+ end.
+
+
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Bytes) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},",",Bytes,
+ ",_,_RestPrimFieldName) ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes, _, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes"),
+ gen_decode_default_call(ClassName,Name,"Bytes",DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+	    %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes"),
+ gen_decode_field_call(ObjName,Name,"Bytes",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,_,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,_,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, telltype, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, telltype, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_C|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_dec_prim(per,Def,Bytes),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",telltype)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,", telltype)"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,", telltype)"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,", telltype)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(per,Type,Bytes),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ []
+ end.
+
+%%%%%%%%%%%%%%%
+
+% gen_decode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, RestPrimFieldName) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_dec_prim(per,Def,"Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'dec_",ObjName,'_',FieldName,
+% "'(Val, Telltype)"}),
+% [{['dec_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'dec_",TypeName,
+% "'(Val, Telltype)"}),
+% [];
+% TypeName ->
+% emit({" 'dec_",TypeName,"'(Val, Telltype)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+% "'(H, Val, Telltype, T)"});
+% TypeName ->
+% emit({indent(3),"'dec_",TypeName,
+% "'(H, Val, Telltype, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} ->
+% []
+% end,
+% gen_decode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_decode_objectfields(C,O,[_|T],CAcc) ->
+% gen_decode_objectfields(C,O,T,CAcc);
+% gen_decode_objectfields(_,_,[],CAcc) ->
+% CAcc.
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+ emit({Name,"(Bytes,_) ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+% is_typefield(Fields,FieldName) ->
+% case lists:keysearch(FieldName,2,Fields) of
+% {value,Field} ->
+% case element(1,Field) of
+% typefield ->
+% true;
+% Other ->
+% {false,Other}
+% end;
+% _ ->
+% false
+% end.
+%% Object Set code generating for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+%% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = (ClassDef#classdef.typespec)#objectclass.fields,
+ InternalFuncs=
+ gen_objset_enc(ObjSetName,UniqueFName,Set,ClassName,
+ ClassFields,1,[]),
+ gen_objset_dec(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+gen_objset_enc(_,{unique,undefined},_,_,_,_,_) ->
+    %% There is no unique field in the class of this object set,
+    %% so don't bother about the constraint
+ [];
+gen_objset_enc(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc++Acc);
+gen_objset_enc(ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,_}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc++Acc;
+gen_objset_enc(ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Val, _) ->",nl}),
+ emit({indent(6),"Size = if",nl}),
+ emit({indent(9),"list(Val) -> length(Val);",nl}),
+ emit({indent(9),"true -> size(Val)",nl}),
+ emit({indent(6),"end,",nl}),
+ emit({indent(6),"if",nl}),
+ emit({indent(9),"Size < 256 ->",nl}),
+ emit({indent(12),"[20,Size,Val];",nl}),
+ emit({indent(9),"true ->",nl}),
+ emit({indent(12),"[21,<<Size:16>>,Val]",nl}),
+ emit({indent(6),"end",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% For each object, gen_inlined_enc_funs iterates over all fields of the
+%% class; for each typefield it checks whether the object has that field
+%% and emits the proper code.
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],ObjSetName,NthObj) ->
+ InternalDefFunName=asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+emit_inner_of_fun(TDef=#typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(per,Type,dotag,"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val)"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val)"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name},_) ->
+ emit({indent(12),"'enc_",Name,"'(Val)"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_encode_prim(erules,Type,dotag,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val)"})
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,{unique,undefined},_,_,_,_) ->
+    %% There is no unique field in the class of this object set,
+    %% so don't bother about the constraint
+ ok;
+gen_objset_dec(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],ClName,
+ ClFields,NthObj)->
+
+ emit({"'getdec_",ObjSName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit({" fun 'dec_",ObjName,"'/4"}),
+ NthObj
+ end,
+ emit({";",nl}),
+ gen_objset_dec(ObjSName,UniqueName,[T|Rest],ClName,ClFields,NewNthObj);
+gen_objset_dec(ObjSetName,UniqueName,[{ObjName,Val,Fields}],_ClName,
+ ClFields,NthObj) ->
+
+ emit({"'getdec_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit({" fun 'dec_",ObjName,"'/4"})
+ end,
+ emit({".",nl,nl}),
+ ok;
+gen_objset_dec(ObjSetName,_,['EXTENSIONMARK'],_ClName,_ClFields,
+ _NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(Attr1, Bytes, _, _) ->",nl}),
+ %% emit({indent(6),"?RT_PER:decode_open_type(Bytes,[])",nl}),
+ emit({indent(6),"{Bytes,Attr1}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ ok;
+gen_objset_dec(_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Fields,[_|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,[],_,NthObj) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ NthObj.
+
+emit_inner_of_decfun(#typedef{name={ExtName,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(per,Type,"Val"),
+ 0;
+ {constructed,bif} ->
+ emit({indent(12),"'dec_",
+ asn1ct_gen:list2name(InternalDefFunName),"'(Val)"}),
+ 1;
+ _ ->
+ emit({indent(12),"'",ExtName,"':'dec_",Name,
+ "'(Val, telltype)"}),
+ 0
+ end;
+emit_inner_of_decfun(#typedef{name=Name},_) ->
+ emit({indent(12),"'dec_",Name,"'(Val, telltype)"}),
+ 0;
+emit_inner_of_decfun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_dec_prim(erules,Type,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+ T,"'(Val)"})
+ end,
+ 0.
+
+
+gen_internal_funcs(_Erules,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit([nl,nl,"'dec_",TypeDef#typedef.name,"'(Bytes) ->",nl]),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+
+%% DECODING *****************************
+%%***************************************
+
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ D = Type,
+ emit({nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes,_) ->",nl}),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,D).
+
+gen_decode(Erules,Tname,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTname = [Cname|Tname],
+ gen_decode(Erules,NewTname,Type);
+
+gen_decode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({nl,"'dec_",asn1ct_gen:list2name(Typename),
+ "'(Bytes,_",ObjFun,") ->",nl}),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+dbdec(Type) when list(Type)->
+ demit({"io:format(\"decoding: ",asn1ct_gen:list2name(Type),"~w~n\",[Bytes]),",nl});
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_dec_prim(Erules,Def,"Bytes"),
+ emit({".",nl,nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_dec_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"Bytes"),
+ emit({".",nl,nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(Bytes,telltype)"}),
+ emit({".",nl,nl});
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ Other ->
+ exit({error,{asn1,{unknown,Other}}})
+ end.
+
+
+
+gen_dec_prim(_Erules,Att,BytesVar) ->
+ Typename = Att#type.def,
+ Constraint = Att#type.constraint,
+ case Typename of
+ 'INTEGER' ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ emit_dec_integer(EffectiveConstr,BytesVar);
+% emit({"?RT_PER:decode_integer(",BytesVar,",",
+% {asis,EffectiveConstr},")"});
+ {'INTEGER',NamedNumberList} ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ emit_dec_integer(EffectiveConstr,BytesVar,NamedNumberList);
+% emit({"?RT_PER:decode_integer(",BytesVar,",",
+% {asis,EffectiveConstr},",",
+% {asis,NamedNumberList},")"});
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_PER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},")"});
+ _ ->
+ emit({"?RT_PER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},")"})
+ end;
+ 'NULL' ->
+ emit({"?RT_PER:decode_null(",
+ BytesVar,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:decode_object_identifier(",
+ BytesVar,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:decode_ObjectDescriptor(",
+ BytesVar,")"});
+ {'ENUMERATED',{NamedNumberList1,NamedNumberList2}} ->
+ NewTup = {list_to_tuple([X||{X,_} <- NamedNumberList1]),
+ list_to_tuple([X||{X,_} <- NamedNumberList2])},
+ NewC = [{'ValueRange',{0,size(element(1,NewTup))-1}}],
+ emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+ {asis,NewC},",",
+ {asis,NewTup},")"});
+ {'ENUMERATED',NamedNumberList} ->
+ %NewTup = list_to_tuple([X||{X,Y} <- NamedNumberList]),
+ NewNNL = [X||{X,_} <- NamedNumberList],
+ NewC = effective_constraint(integer,
+ [{'ValueRange',{0,length(NewNNL)-1}}]),
+ emit_dec_enumerated(BytesVar,NewC,NewNNL);
+% emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+% {asis,NewC},",",
+% {asis,NewTup},")"});
+ 'BOOLEAN'->
+ emit({"?RT_PER:decode_boolean(",BytesVar,")"});
+ 'OCTET STRING' ->
+ emit_dec_octet_string(Constraint,BytesVar);
+% emit({"?RT_PER:decode_octet_string(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'NumericString' ->
+ emit_dec_known_multiplier_string('NumericString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_NumericString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'TeletexString' ->
+ emit({"?RT_PER:decode_TeletexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:decode_VideotexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'UTCTime' ->
+ emit_dec_known_multiplier_string('VisibleString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'GeneralizedTime' ->
+ emit_dec_known_multiplier_string('VisibleString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'GraphicString' ->
+ emit({"?RT_PER:decode_GraphicString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VisibleString' ->
+ emit_dec_known_multiplier_string('VisibleString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'GeneralString' ->
+ emit({"?RT_PER:decode_GeneralString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'PrintableString' ->
+ emit_dec_known_multiplier_string('PrintableString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_PrintableString(",BytesVar,",",{asis,Constraint},")"});
+ 'IA5String' ->
+ emit_dec_known_multiplier_string('IA5String',Constraint,BytesVar);
+% emit({"?RT_PER:decode_IA5String(",BytesVar,",",{asis,Constraint},")"});
+ 'BMPString' ->
+ emit_dec_known_multiplier_string('BMPString',Constraint,BytesVar);
+% emit({"?RT_PER:decode_BMPString(",BytesVar,",",{asis,Constraint},")"});
+ 'UniversalString' ->
+ emit_dec_known_multiplier_string('UniversalString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_UniversalString(",BytesVar,",",{asis,Constraint},")"});
+ 'ANY' ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",",
+ {asis,Constraint}, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ _ ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",[])"])
+ end;
+ Other ->
+	    exit({'cant decode',Other})
+ end.
+
+
+emit_dec_integer(C,BytesVar,NNL) ->
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(buffer),
+ Tmpterm = asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ Buffer = asn1ct_gen:mk_var(asn1ct_name:curr(buffer)),
+ emit({" begin {",{curr,tmpterm},",",{curr,buffer},"} = ",nl}),
+ emit_dec_integer(C,BytesVar),
+ emit({",",nl," case ",Tmpterm," of",nl}),
+ lists:map(fun({Name,Int})->emit({" ",Int," -> {",{asis,Name},",",
+ Buffer,"};",nl});
+ (_)-> exit({error,{asn1,{"error in named number list",NNL}}})
+ end,
+ NNL),
+ emit({" _ -> {",Tmpterm,",",Buffer,"}",nl}),
+ emit({" end",nl}), % end of case
+ emit(" end"). % end of begin
+
+emit_dec_integer([{'SingleValue',Int}],BytesVar) when integer(Int) ->
+ emit(["{",Int,",",BytesVar,"}"]);
+emit_dec_integer([{_,{Lb,_Ub},_Range,{BitsOrOctets,N}}],BytesVar) ->
+ GetBorO =
+ case BitsOrOctets of
+ bits -> "getbits";
+ _ -> "getoctets"
+ end,
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(tmpremain),
+ emit({" begin",nl," {",{curr,tmpterm},",",{curr,tmpremain},"}=",
+ "?RT_PER:",GetBorO,"(",BytesVar,",",N,"),",nl}),
+ emit({" {",{curr,tmpterm},"+",Lb,",",{curr,tmpremain},"}",nl,
+ " end"});
+emit_dec_integer([{_,{'MIN',_}}],BytesVar) ->
+ emit({"?RT_PER:decode_unconstrained_number(",BytesVar,")"});
+emit_dec_integer([{_,{Lb,'MAX'}}],BytesVar) ->
+ emit({"?RT_PER:decode_semi_constrained_number(",BytesVar,",",Lb,")"});
+emit_dec_integer([{'ValueRange',VR={Lb,Ub}}],BytesVar) ->
+ Range = Ub-Lb+1,
+ emit({"?RT_PER:decode_constrained_number(",BytesVar,",",
+ {asis,VR},",",Range,")"});
+emit_dec_integer(C=[{Rc,_}],BytesVar) when tuple(Rc) ->
+ emit({"?RT_PER:decode_integer(",BytesVar,",",{asis,C},")"});
+emit_dec_integer(_,BytesVar) ->
+ emit({"?RT_PER:decode_unconstrained_number(",BytesVar,")"}).
+
+
+emit_dec_enumerated(BytesVar,C,NamedNumberList) ->
+ emit_dec_enumerated_begin(),% emits a begin if component
+ asn1ct_name:new(tmpterm),
+ Tmpterm = asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ asn1ct_name:new(tmpremain),
+ Tmpremain = asn1ct_gen:mk_var(asn1ct_name:curr(tmpremain)),
+ emit({" {",{curr,tmpterm},",",{curr,tmpremain},"} =",nl}),
+ emit_dec_integer(C,BytesVar),
+ emit({",",nl," case ",Tmpterm," of "}),
+% Cases=lists:flatten(dec_enumerated_cases(NamedNumberList,asn1ct_gen:mk_var(asn1ct_name:curr(tmpremain)),0)),
+ Cases=lists:flatten(dec_enumerated_cases(NamedNumberList,Tmpremain,0)),
+ emit({Cases++"_->exit({error,{asn1,{decode_enumerated,{",Tmpterm,
+ ",",{asis,NamedNumberList},"}}}}) end",nl}),
+ emit_dec_enumerated_end().
+
+emit_dec_enumerated_begin() ->
+ case get(component_type) of
+ {true,_} ->
+ emit({" begin",nl});
+ _ -> ok
+ end.
+
+emit_dec_enumerated_end() ->
+ case get(component_type) of
+ {true,_} ->
+ emit(" end");
+ _ -> ok
+ end.
+
+% dec_enumerated_cases(NNL,Tmpremain,No) ->
+% Cases=dec_enumerated_cases1(NNL,Tmpremain,0),
+% lists:flatten(io_lib:format("(case ~s "++Cases++
+% "~s when atom(~s)->exit({error,{asn1,{namednumber,~s}}});_->~s end)",[Value,"TmpVal","TmpVal","TmpVal",Value])).
+
+dec_enumerated_cases([Name|Rest],Tmpremain,No) ->
+ io_lib:format("~w->{~w,~s};",[No,Name,Tmpremain])++
+ dec_enumerated_cases(Rest,Tmpremain,No+1);
+dec_enumerated_cases([],_,_) ->
+ "".
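+%% Example (hypothetical arguments): dec_enumerated_cases/3 builds the
+%% case clauses used above, e.g.
+%%   dec_enumerated_cases([red,green],"Buffer1",0)
+%% gives (once flattened) "0->{red,Buffer1};1->{green,Buffer1};", after
+%% which emit_dec_enumerated/3 appends the final '_ -> exit(...)' clause.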
+
+
+% more_genfields(_Fields,[]) ->
+% false;
+% more_genfields(Fields,[{FieldName,_}|T]) ->
+% case is_typefield(Fields,FieldName) of
+% true -> true;
+% {false,objectfield} -> true;
+% {false,_} -> more_genfields(Fields,T)
+% end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_name.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_name.erl
new file mode 100644
index 0000000000..03252bd7d9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_name.erl
@@ -0,0 +1,225 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_name.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_name).
+
+%%-compile(export_all).
+-export([name_server_loop/1,
+ start/0,
+ stop/0,
+ push/1,
+ pop/1,
+ curr/1,
+ clear/0,
+ delete/1,
+ active/1,
+ prev/1,
+ next/1,
+ all/1,
+ new/1]).
+
+start() ->
+ start_server(asn1_ns, asn1ct_name,name_server_loop,[[]]).
+
+stop() -> stop_server(asn1_ns).
+
+name_server_loop(Vars) ->
+%% io:format("name -- ~w~n",[Vars]),
+ receive
+ {From,{current,Variable}} ->
+ From ! {asn1_ns,get_curr(Vars,Variable)},
+ name_server_loop(Vars);
+ {From,{pop,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(pop_var(Vars,Variable));
+ {From,{push,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(push_var(Vars,Variable));
+ {From,{delete,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(delete_var(Vars,Variable));
+ {From,{new,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(new_var(Vars,Variable));
+ {From,{prev,Variable}} ->
+ From ! {asn1_ns,get_prev(Vars,Variable)},
+ name_server_loop(Vars);
+ {From,{next,Variable}} ->
+ From ! {asn1_ns,get_next(Vars,Variable)},
+ name_server_loop(Vars);
+ {From,stop} ->
+ From ! {asn1_ns,stopped},
+ exit(normal)
+ end.
+
+active(V) ->
+ case curr(V) of
+ nil -> false;
+ _ -> true
+ end.
+
+req(Req) ->
+ asn1_ns ! {self(), Req},
+ receive {asn1_ns, Reply} -> Reply end.
+
+pop(V) -> req({pop,V}).
+push(V) -> req({push,V}).
+clear() -> req(stop), start().
+curr(V) -> req({current,V}).
+new(V) -> req({new,V}).
+delete(V) -> req({delete,V}).
+prev(V) ->
+ case req({prev,V}) of
+ none ->
+ exit('cant get prev of none');
+ Rep -> Rep
+ end.
+
+next(V) ->
+ case req({next,V}) of
+ none ->
+ exit('cant get next of none');
+ Rep -> Rep
+ end.
+
+all(V) ->
+ Curr = curr(V),
+ if Curr == V -> [];
+ true ->
+ lists:reverse(generate(V,last(Curr),[],0))
+ end.
+
+generate(V,Number,Res,Pos) ->
+ Ell = Pos+1,
+ if
+ Ell > Number ->
+ Res;
+ true ->
+ generate(V,Number,[list_to_atom(lists:concat([V,Ell]))|Res],Ell)
+ end.
+
+last(V) ->
+ last2(lists:reverse(atom_to_list(V))).
+
+last2(RevL) ->
+ list_to_integer(lists:reverse(get_digs(RevL))).
+
+
+get_digs([H|T]) ->
+ if
+ H < $9+1,
+ H > $0-1 ->
+ [H|get_digs(T)];
+ true ->
+ []
+ end.
+
+push_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ [{Variable,[0]}|Vars];
+ {value,{Variable,[Digit|Drest]}} ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,[Digit,Digit|Drest]}|NewVars]
+ end.
+
+pop_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ ok;
+ {value,{Variable,[_Dig]}} ->
+ lists:keydelete(Variable,1,Vars);
+ {value,{Variable,[_Dig|Digits]}} ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,Digits}|NewVars]
+ end.
+
+get_curr([],Variable) ->
+ Variable;
+get_curr([{Variable,[0|_Drest]}|_Tail],Variable) ->
+ Variable;
+get_curr([{Variable,[Digit|_Drest]}|_Tail],Variable) ->
+ list_to_atom(lists:concat([Variable,integer_to_list(Digit)]));
+
+get_curr([_|Tail],Variable) ->
+ get_curr(Tail,Variable).
+
+new_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ [{Variable,[1]}|Vars];
+ {value,{Variable,[Digit|Drest]}} ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,[Digit+1|Drest]}|NewVars]
+ end.
+
+delete_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ Vars;
+ {value,{Variable,[N]}} when N =< 1 ->
+ lists:keydelete(Variable,1,Vars);
+ {value,{Variable,[Digit|Drest]}} ->
+ case Digit of
+ 0 ->
+ Vars;
+ _ ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,[Digit-1|Drest]}|NewVars]
+ end
+ end.
+
+get_prev(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ none;
+ {value,{Variable,[Digit|_]}} when Digit =< 1 ->
+ Variable;
+ {value,{Variable,[Digit|_]}} when Digit > 1 ->
+ list_to_atom(lists:concat([Variable,
+ integer_to_list(Digit-1)]));
+ _ ->
+ none
+ end.
+
+get_next(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ list_to_atom(lists:concat([Variable,"1"]));
+ {value,{Variable,[Digit|_]}} when Digit >= 0 ->
+ list_to_atom(lists:concat([Variable,
+ integer_to_list(Digit+1)]));
+ _ ->
+ none
+ end.
+
+
+stop_server(Name) ->
+ stop_server(Name, whereis(Name)).
+stop_server(_Name, undefined) -> stopped;
+stop_server(Name, _Pid) ->
+ Name ! {self(), stop},
+ receive {Name, _} -> stopped end.
+
+
+start_server(Name,Mod,Fun,Args) ->
+ case whereis(Name) of
+ undefined ->
+ register(Name, spawn(Mod,Fun, Args));
+ _Pid ->
+ already_started
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser.yrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser.yrl
new file mode 100644
index 0000000000..df74685cb7
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser.yrl
@@ -0,0 +1,1175 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_parser.yrl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+Nonterminals
+ModuleDefinition ModuleIdentifier DefinitiveIdentifier DefinitiveObjIdComponentList
+DefinitiveObjIdComponent TagDefault ExtensionDefault
+ModuleBody Exports SymbolsExported Imports SymbolsImported
+SymbolsFromModuleList SymbolsFromModule GlobalModuleReference AssignedIdentifier SymbolList
+Symbol Reference AssignmentList Assignment
+ExtensionAndException
+ComponentTypeLists
+Externaltypereference Externalvaluereference DefinedType DefinedValue
+AbsoluteReference ItemSpec ItemId ComponentId TypeAssignment
+ValueAssignment
+% ValueSetTypeAssignment
+ValueSet
+Type BuiltinType NamedType ReferencedType
+Value ValueNotNull BuiltinValue ReferencedValue NamedValue
+% BooleanType
+BooleanValue IntegerType NamedNumberList NamedNumber SignedNumber
+% inlined IntegerValue
+EnumeratedType
+% inlined Enumerations
+Enumeration EnumerationItem
+% inlined EnumeratedValue
+% RealType
+RealValue NumericRealValue SpecialRealValue BitStringType
+% inlined BitStringValue
+IdentifierList
+% OctetStringType
+% inlined OctetStringValue
+% NullType NullValue
+SequenceType ComponentTypeList ComponentType
+% SequenceValue SequenceOfValue
+ComponentValueList SequenceOfType
+SAndSOfValue ValueList SetType
+% SetValue SetOfValue
+SetOfType
+ChoiceType
+% AlternativeTypeList made common with ComponentTypeList
+ChoiceValue
+AnyValue
+AnyDefBy
+SelectionType
+TaggedType Tag ClassNumber Class
+% redundant TaggedValue
+% EmbeddedPDVType EmbeddedPDVValue ExternalType ExternalValue ObjectIdentifierType
+ObjectIdentifierValue ObjIdComponentList ObjIdComponent
+% NameForm NumberForm NameAndNumberForm
+CharacterStringType
+RestrictedCharacterStringValue CharacterStringList
+% CharSyms CharsDefn
+Quadruple
+% Group Plane Row Cell
+Tuple
+% TableColumn TableRow
+% UnrestrictedCharacterString
+CharacterStringValue
+% UnrestrictedCharacterStringValue
+ConstrainedType Constraint ConstraintSpec TypeWithConstraint
+ElementSetSpecs ElementSetSpec
+%GeneralConstraint
+UserDefinedConstraint UserDefinedConstraintParameter
+UserDefinedConstraintParameters
+ExceptionSpec
+ExceptionIdentification
+Unions
+UnionMark
+UElems
+Intersections
+IntersectionElements
+IntersectionMark
+IElems
+Elements
+Elems
+SubTypeElements
+Exclusions
+LowerEndpoint
+UpperEndpoint
+LowerEndValue
+UpperEndValue
+TypeConstraints NamedConstraint PresenceConstraint
+
+ParameterizedTypeAssignment
+ParameterList
+Parameters
+Parameter
+ParameterizedType
+
+% X.681
+ObjectClassAssignment ObjectClass ObjectClassDefn
+FieldSpecs FieldSpec OptionalitySpec WithSyntaxSpec
+TokenOrGroupSpecs TokenOrGroupSpec
+SyntaxList OptionalGroup RequiredToken Word
+TypeOptionalitySpec
+ValueOrObjectOptSpec
+VSetOrOSetOptSpec
+ValueOptionalitySpec
+ObjectOptionalitySpec
+ValueSetOptionalitySpec
+ObjectSetOptionalitySpec
+% X.681 chapter 15
+InformationFromObjects
+ValueFromObject
+%ValueSetFromObjects
+TypeFromObject
+%ObjectFromObject
+%ObjectSetFromObjects
+ReferencedObjects
+FieldName
+PrimitiveFieldName
+
+ObjectAssignment
+ObjectSetAssignment
+ObjectSet
+ObjectSetElements
+Object
+ObjectDefn
+DefaultSyntax
+DefinedSyntax
+FieldSettings
+FieldSetting
+DefinedSyntaxTokens
+DefinedSyntaxToken
+Setting
+DefinedObject
+ObjectFromObject
+ObjectSetFromObjects
+ParameterizedObject
+ExternalObjectReference
+DefinedObjectSet
+DefinedObjectClass
+ExternalObjectClassReference
+
+% X.682
+TableConstraint
+ComponentRelationConstraint
+ComponentIdList
+
+% X.683
+ActualParameter
+.
+
+%UsefulType.
+
+Terminals
+'ABSENT' 'ABSTRACT-SYNTAX' 'ALL' 'ANY'
+'APPLICATION' 'AUTOMATIC' 'BEGIN' 'BIT'
+'BOOLEAN' 'BY' 'CHARACTER' 'CHOICE' 'CLASS' 'COMPONENT'
+'COMPONENTS' 'CONSTRAINED' 'DEFAULT' 'DEFINED' 'DEFINITIONS'
+'EMBEDDED' 'END' 'ENUMERATED' 'EXCEPT' 'EXPLICIT'
+'EXPORTS' 'EXTENSIBILITY' 'EXTERNAL' 'FALSE' 'FROM' 'GeneralizedTime'
+'TYPE-IDENTIFIER'
+'IDENTIFIER' 'IMPLICIT' 'IMPLIED' 'IMPORTS'
+'INCLUDES' 'INSTANCE' 'INTEGER' 'INTERSECTION'
+'MAX' 'MIN' 'MINUS-INFINITY' 'NULL'
+'OBJECT' 'ObjectDescriptor' 'OCTET' 'OF' 'OPTIONAL' 'PDV' 'PLUS-INFINITY'
+'PRESENT' 'PRIVATE' 'REAL' 'SEQUENCE' 'SET' 'SIZE'
+'STRING' 'SYNTAX' 'TAGS' 'TRUE' 'UNION'
+'UNIQUE' 'UNIVERSAL' 'UTCTime' 'WITH'
+'{' '}' '(' ')' '.' '::=' ';' ',' '@' '*' '-' '[' ']'
+'!' '..' '...' '|' '<' ':' '^'
+number identifier typereference restrictedcharacterstringtype
+bstring hstring cstring typefieldreference valuefieldreference
+objectclassreference word.
+
+Rootsymbol ModuleDefinition.
+Endsymbol '$end'.
+
+Left 300 'EXCEPT'.
+Left 200 '^'.
+Left 200 'INTERSECTION'.
+Left 100 '|'.
+Left 100 'UNION'.
+
+
+ModuleDefinition -> ModuleIdentifier
+ 'DEFINITIONS'
+ TagDefault
+ ExtensionDefault
+ '::='
+ 'BEGIN'
+ ModuleBody
+ 'END' :
+ {'ModuleBody',Ex,Im,Types} = '$7',
+ {{typereference,Pos,Name},Defid} = '$1',
+ #module{
+ pos= Pos,
+ name= Name,
+ defid= Defid,
+ tagdefault='$3',
+ extensiondefault='$4',
+ exports=Ex,
+ imports=Im,
+ typeorval=Types}.
+% {module, '$1','$3','$6'}.
+% Always results in a record of type module, defined in asn_records.hrl
+
+ModuleIdentifier -> typereference DefinitiveIdentifier :
+ put(asn1_module,'$1'#typereference.val),
+ {'$1','$2'}.
+
+DefinitiveIdentifier -> '{' DefinitiveObjIdComponentList '}' : '$2' .
+DefinitiveIdentifier -> '$empty': [].
+
+DefinitiveObjIdComponentList -> DefinitiveObjIdComponent : ['$1'].
+DefinitiveObjIdComponentList -> DefinitiveObjIdComponent DefinitiveObjIdComponentList : ['$1'|'$2'].
+
+DefinitiveObjIdComponent -> identifier : '$1' . %expanded->
+% DefinitiveObjIdComponent -> NameForm : '$1' .
+DefinitiveObjIdComponent -> number : '$1' . %expanded->
+% DefinitiveObjIdComponent -> DefinitiveNumberForm : 'fix' .
+DefinitiveObjIdComponent -> identifier '(' number ')' : {'$1','$3'} . %expanded->
+% DefinitiveObjIdComponent -> DefinitiveNameAndNumberForm : {'$1','$3'} .
+
+% DefinitiveNumberForm -> number : 'fix' .
+
+% DefinitiveNameAndNumberForm -> identifier '(' DefinitiveNumberForm ')' : 'fix' .
+
+TagDefault -> 'EXPLICIT' 'TAGS' : put(tagdefault,'EXPLICIT'),'EXPLICIT' .
+TagDefault -> 'IMPLICIT' 'TAGS' : put(tagdefault,'IMPLICIT'),'IMPLICIT' .
+TagDefault -> 'AUTOMATIC' 'TAGS' : put(tagdefault,'AUTOMATIC'),'AUTOMATIC' .
+TagDefault -> '$empty': put(tagdefault,'EXPLICIT'),'EXPLICIT'. % because this is the default
+
+ExtensionDefault -> 'EXTENSIBILITY' 'IMPLIED' : 'IMPLIED'.
+ExtensionDefault -> '$empty' : 'false'. % because this is the default
+
+ModuleBody -> Exports Imports AssignmentList : {'ModuleBody','$1','$2','$3'}.
+ModuleBody -> '$empty' : {'ModuleBody',nil,nil,[]}.
+
+Exports -> 'EXPORTS' SymbolList ';' : {exports,'$2'}.
+Exports -> 'EXPORTS' ';' : {exports,[]}.
+Exports -> '$empty' : {exports,all} .
+
+% inlined above SymbolsExported -> SymbolList : '$1'.
+% inlined above SymbolsExported -> '$empty' : [].
+
+Imports -> 'IMPORTS' SymbolsFromModuleList ';' : {imports,'$2'}.
+Imports -> 'IMPORTS' ';' : {imports,[]}.
+Imports -> '$empty' : {imports,[]} .
+
+% inlined above SymbolsImported -> SymbolsFromModuleList : '$1'.
+% inlined above SymbolsImported -> '$empty' : [].
+
+SymbolsFromModuleList -> SymbolsFromModule :['$1'].
+% SymbolsFromModuleList -> SymbolsFromModuleList SymbolsFromModule :$1.%changed
+SymbolsFromModuleList -> SymbolsFromModule SymbolsFromModuleList :['$1'|'$2'].
+
+% expanded SymbolsFromModule -> SymbolList 'FROM' GlobalModuleReference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+SymbolsFromModule -> SymbolList 'FROM' typereference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+SymbolsFromModule -> SymbolList 'FROM' typereference '{' ValueList '}': #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+%SymbolsFromModule -> SymbolList 'FROM' typereference identifier: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+%SymbolsFromModule -> SymbolList 'FROM' typereference Externalvaluereference: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+%SymbolsFromModule -> SymbolList 'FROM' typereference DefinedValue: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+
+% inlined GlobalModuleReference -> typereference AssignedIdentifier : {'$1','$2'} .
+
+% inlined above AssignedIdentifier -> '{' ValueList '}' : '$2'.
+% replaced AssignedIdentifier -> '{' DefinedValue ObjIdComponentList '}' :{'$2','$3'}.
+% not necessary , replaced by SAndSOfValue AssignedIdentifier -> ObjectIdentifierValue :'$1'.
+% AssignedIdentifier -> DefinedValue : '$1'.
+% inlined AssignedIdentifier -> '$empty' : undefined.
+
+SymbolList -> Symbol : ['$1'].
+SymbolList -> Symbol ',' SymbolList :['$1'|'$3'].
+
+Symbol -> Reference :'$1'.
+% later Symbol -> ParameterizedReference :'$1'.
+
+Reference -> typereference :'$1'.
+Reference -> identifier:'$1'.
+Reference -> typereference '{' '}':'$1'.
+Reference -> Externaltypereference '{' '}':'$1'.
+
+% later Reference -> objectclassreference :'$1'.
+% later Reference -> objectreference :'$1'.
+% later Reference -> objectsetreference :'$1'.
+
+AssignmentList -> Assignment : ['$1'].
+% modified AssignmentList -> AssignmentList Assignment : '$1'.
+AssignmentList -> Assignment AssignmentList : ['$1'|'$2'].
+
+Assignment -> TypeAssignment : '$1'.
+Assignment -> ValueAssignment : '$1'.
+% later Assignment -> ValueSetTypeAssignment : '$1'.
+Assignment -> ObjectClassAssignment : '$1'.
+% later Assignment -> ObjectAssignment : '$1'.
+% combined with ValueAssignment Assignment -> ObjectAssignment : '$1'.
+Assignment -> ObjectSetAssignment : '$1'.
+Assignment -> ParameterizedTypeAssignment : '$1'.
+%Assignment -> ParameterizedValueAssignment : '$1'.
+%Assignment -> ParameterizedValueSetTypeAssignment : '$1'.
+%Assignment -> ParameterizedObjectClassAssignment : '$1'.
+
+ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' :
+%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5',[]}}.
+ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
+%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5','$7'}}.
+
+FieldSpecs -> FieldSpec : ['$1'].
+FieldSpecs -> FieldSpec ',' FieldSpecs : ['$1'|'$3'].
+
+FieldSpec -> typefieldreference TypeOptionalitySpec : {typefield,'$1','$2'}.
+
+FieldSpec -> valuefieldreference Type 'UNIQUE' ValueOrObjectOptSpec :
+ {fixedtypevaluefield,'$1','$2','UNIQUE','$4'}.
+FieldSpec -> valuefieldreference Type ValueOrObjectOptSpec :
+ {fixedtypevaluefield,'$1','$2',undefined,'$3'}.
+
+FieldSpec -> valuefieldreference typefieldreference ValueOrObjectOptSpec :
+ {variabletypevaluefield, '$1','$2','$3'}.
+
+FieldSpec -> typefieldreference typefieldreference VSetOrOSetOptSpec :
+ {variabletypevaluesetfield, '$1','$2','$3'}.
+
+FieldSpec -> typefieldreference Type VSetOrOSetOptSpec :
+ {fixedtypevaluesetfield, '$1','$2','$3'}.
+
+TypeOptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
+TypeOptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
+TypeOptionalitySpec -> '$empty' : 'MANDATORY'.
+
+ValueOrObjectOptSpec -> ValueOptionalitySpec : '$1'.
+ValueOrObjectOptSpec -> ObjectOptionalitySpec : '$1'.
+ValueOrObjectOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
+ValueOrObjectOptSpec -> '$empty' : 'MANDATORY'.
+
+ValueOptionalitySpec -> 'DEFAULT' Value :
+ case '$2' of
+ {identifier,_,Id} -> {'DEFAULT',Id};
+ _ -> {'DEFAULT','$2'}
+ end.
+
+%ObjectOptionalitySpec -> 'DEFAULT' Object :{'DEFAULT','$1'}.
+ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting ',' FieldSettings '}' :
+ {'DEFAULT',{object,['$2'|'$4']}}.
+ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting '}' :
+ {'DEFAULT',{object, ['$2']}}.
+%ObjectOptionalitySpec -> 'DEFAULT' '{' DefinedSyntaxTokens '}' :
+% {'DEFAULT',{object, '$2'}}.
+ObjectOptionalitySpec -> 'DEFAULT' ObjectFromObject :
+ {'DEFAULT',{object, '$2'}}.
+
+
+VSetOrOSetOptSpec -> ValueSetOptionalitySpec : '$1'.
+%VSetOrOSetOptSpec -> ObjectSetOptionalitySpec : '$1'.
+VSetOrOSetOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
+VSetOrOSetOptSpec -> '$empty' : 'MANDATORY'.
+
+ValueSetOptionalitySpec -> 'DEFAULT' ValueSet : {'DEFAULT','$1'}.
+
+%ObjectSetOptionalitySpec -> 'DEFAULT' ObjectSet : {'DEFAULT','$1'}.
+
+OptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
+OptionalitySpec -> 'DEFAULT' ValueNotNull :
+ case '$2' of
+ {identifier,_,Id} -> {'DEFAULT',Id};
+ _ -> {'DEFAULT','$2'}
+ end.
+OptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
+OptionalitySpec -> '$empty' : 'MANDATORY'.
+
+WithSyntaxSpec -> 'WITH' 'SYNTAX' SyntaxList : {'WITH SYNTAX','$3'}.
+
+SyntaxList -> '{' TokenOrGroupSpecs '}' : '$2'.
+SyntaxList -> '{' '}' : [].
+
+TokenOrGroupSpecs -> TokenOrGroupSpec : ['$1'].
+TokenOrGroupSpecs -> TokenOrGroupSpec TokenOrGroupSpecs : ['$1'|'$2'].
+
+TokenOrGroupSpec -> RequiredToken : '$1'.
+TokenOrGroupSpec -> OptionalGroup : '$1'.
+
+OptionalGroup -> '[' TokenOrGroupSpecs ']' : '$2'.
+
+RequiredToken -> typereference : '$1'.
+RequiredToken -> Word : '$1'.
+RequiredToken -> ',' : '$1'.
+RequiredToken -> PrimitiveFieldName : '$1'.
+
+Word -> 'BY' : 'BY'.
+
+ParameterizedTypeAssignment -> typereference ParameterList '::=' Type :
+ #ptypedef{pos=element(2,'$1'),name=element(3,'$1'),
+ args='$2', typespec='$4'}.
+
+ParameterList -> '{' Parameters '}':'$2'.
+
+Parameters -> Parameter: ['$1'].
+Parameters -> Parameter ',' Parameters: ['$1'|'$3'].
+
+Parameter -> typereference: '$1'.
+Parameter -> Value: '$1'.
+Parameter -> Type ':' typereference: {'$1','$3'}.
+Parameter -> Type ':' Value: {'$1','$3'}.
+Parameter -> '{' typereference '}': {objectset,'$2'}.
+
+
+% Externaltypereference -> modulereference '.' typereference : {'$1','$3'} .
+Externaltypereference -> typereference '.' typereference : #'Externaltypereference'{pos=element(2,'$1'),module=element(3,'$1'),type=element(3,'$3')}.
+
+% Externalvaluereference -> modulereference '.' valuereference : {'$1','$3'} .
+% inlined Externalvaluereference -> typereference '.' identifier : #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),value=element(3,'$3')}.
+
+
+DefinedType -> Externaltypereference : '$1' .
+DefinedType -> typereference :
+ #'Externaltypereference'{pos='$1'#typereference.pos,
+ module= get(asn1_module),
+ type= '$1'#typereference.val} .
+DefinedType -> typereference ParameterList : {pt,'$1','$2'}.
+DefinedType -> Externaltypereference ParameterList : {pt,'$1','$2'}.
+
+% ActualParameterList -> '{' ActualParameters '}' : '$1'.
+
+% ActualParameters -> ActualParameter : ['$1'].
+% ActualParameters -> ActualParameter ',' ActualParameters : ['$1'|'$3'].
+
+ActualParameter -> Type : '$1'.
+ActualParameter -> ValueNotNull : '$1'.
+ActualParameter -> ValueSet : '$1'.
+% later DefinedType -> ParameterizedType : '$1' .
+% later DefinedType -> ParameterizedValueSetType : '$1' .
+
+% inlined DefinedValue -> Externalvaluereference :'$1'.
+% inlined DefinedValue -> identifier :'$1'.
+% later DefinedValue -> ParameterizedValue :'$1'.
+
+% not referenced yet AbsoluteReference -> '@' GlobalModuleReference '.' ItemSpec :{'$2','$4'}.
+
+% not referenced yet ItemSpec -> typereference :'$1'.
+% not referenced yet ItemSpec -> ItemId '.' ComponentId : {'$1','$3'}.
+
+% not referenced yet ItemId -> ItemSpec : '$1'.
+
+% not referenced yet ComponentId -> identifier :'$1'.
+% not referenced yet ComponentId -> number :'$1'.
+% not referenced yet ComponentId -> '*' :'$1'.
+
+TypeAssignment -> typereference '::=' Type :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec='$3'}.
+
+ValueAssignment -> identifier Type '::=' Value :
+ #valuedef{pos=element(2,'$1'),name=element(3,'$1'),type='$2',value='$4'}.
+
+% later ValueSetTypeAssignment -> typereference Type '::=' ValueSet :{'ValueSetTypeAssignment','$1','$2','$4'}.
+
+
+ValueSet -> '{' ElementSetSpec '}' : {valueset,'$2'}.
+
+% record(type,{tag,def,constraint}).
+Type -> BuiltinType :#type{def='$1'}.
+Type -> 'NULL' :#type{def='NULL'}.
+Type -> TaggedType:'$1'.
+Type -> ReferencedType:#type{def='$1'}. % change notag later
+Type -> ConstrainedType:'$1'.
+
+%ANY is here for compatibility with the old ASN.1 standard from 1988
+BuiltinType -> 'ANY' AnyDefBy:
+ case '$2' of
+ [] -> 'ANY';
+ _ -> {'ANY DEFINED BY','$2'}
+ end.
+BuiltinType -> BitStringType :'$1'.
+BuiltinType -> 'BOOLEAN' :element(1,'$1').
+BuiltinType -> CharacterStringType :'$1'.
+BuiltinType -> ChoiceType :'$1'.
+BuiltinType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
+BuiltinType -> EnumeratedType :'$1'.
+BuiltinType -> 'EXTERNAL' :element(1,'$1').
+% later BuiltinType -> InstanceOfType :'$1'.
+BuiltinType -> IntegerType :'$1'.
+% BuiltinType -> 'NULL' :element(1,'$1').
+% later BuiltinType -> ObjectClassFieldType :'$1'.
+BuiltinType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
+BuiltinType -> 'OCTET' 'STRING' :'OCTET STRING'.
+BuiltinType -> 'REAL' :element(1,'$1').
+BuiltinType -> SequenceType :'$1'.
+BuiltinType -> SequenceOfType :'$1'.
+BuiltinType -> SetType :'$1'.
+BuiltinType -> SetOfType :'$1'.
+% The so called Useful types
+BuiltinType -> 'GeneralizedTime': 'GeneralizedTime'.
+BuiltinType -> 'UTCTime' :'UTCTime'.
+BuiltinType -> 'ObjectDescriptor' : 'ObjectDescriptor'.
+
+% moved BuiltinType -> TaggedType :'$1'.
+
+
+AnyDefBy -> 'DEFINED' 'BY' identifier: '$3'.
+AnyDefBy -> '$empty': [].
+
+NamedType -> identifier Type :
+%{_,Pos,Val} = '$1',
+%{'NamedType',Pos,{Val,'$2'}}.
+V1 = '$1',
+{'NamedType',V1#identifier.pos,{V1#identifier.val,'$2'}}.
+NamedType -> SelectionType :'$1'.
+
+ReferencedType -> DefinedType : '$1'.
+% redundant ReferencedType -> UsefulType : 'fix'.
+ReferencedType -> SelectionType : '$1'.
+ReferencedType -> TypeFromObject : '$1'.
+% later ReferencedType -> ValueSetFromObjects : 'fix'.
+
+% too many conflicts: Value -> AnyValue :'$1'.
+Value -> ValueNotNull : '$1'.
+Value -> 'NULL' :element(1,'$1').
+
+ValueNotNull -> BuiltinValue :'$1'.
+% inlined Value -> DefinedValue :'$1'. % DefinedValue , identifier
+% inlined Externalvaluereference -> Externalvaluereference :'$1'.
+ValueNotNull -> typereference '.' identifier :
+ #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
+ value=element(3,'$3')}.
+ValueNotNull -> identifier :'$1'.
+
+
+%tmp Value -> NamedNumber: '$1'. % not a value but part of ObjIdC
+% redundant BuiltinValue -> BitStringValue :'$1'.
+BuiltinValue -> BooleanValue :'$1'.
+BuiltinValue -> CharacterStringValue :'$1'.
+BuiltinValue -> ChoiceValue :'$1'.
+% BuiltinValue -> EmbeddedPDVValue :'$1'. ==SequenceValue
+% BuiltinValue -> EnumeratedValue :'$1'. identifier
+% BuiltinValue -> ExternalValue :'$1'. ==SequenceValue
+% later BuiltinValue -> InstanceOfValue :'$1'.
+BuiltinValue -> SignedNumber :'$1'.
+% BuiltinValue -> 'NULL' :'$1'.
+% later BuiltinValue -> ObjectClassFieldValue :'$1'.
+% replaced by SAndSOfValue BuiltinValue -> ObjectIdentifierValue :'$1'.
+BuiltinValue -> bstring :element(3,'$1').
+BuiltinValue -> hstring :element(3,'$1').
+% conflict BuiltinValue -> RealValue :'$1'.
+BuiltinValue -> SAndSOfValue :'$1'.
+% replaced BuiltinValue -> SequenceOfValue :'$1'.
+% replaced BuiltinValue -> SequenceValue :'$1'.
+% replaced BuiltinValue -> SetValue :'$1'.
+% replaced BuiltinValue -> SetOfValue :'$1'.
+% conflict redundant BuiltinValue -> TaggedValue :'$1'.
+
+% inlined ReferencedValue -> DefinedValue:'$1'.
+% ReferencedValue -> Externalvaluereference:'$1'.
+% ReferencedValue -> identifier :'$1'.
+% later ReferencedValue -> ValueFromObject:'$1'.
+
+% inlined BooleanType -> BOOLEAN :'BOOLEAN'.
+
+% too many conflicts AnyValue -> Type ':' Value : {'ANYVALUE',{'$1','$3'}}.
+
+BooleanValue -> TRUE :true.
+BooleanValue -> FALSE :false.
+
+IntegerType -> 'INTEGER' : 'INTEGER'.
+IntegerType -> 'INTEGER' '{' NamedNumberList '}' : {'INTEGER','$3'}.
+
+NamedNumberList -> NamedNumber :['$1'].
+% modified NamedNumberList -> NamedNumberList ',' NamedNumber :'fix'.
+NamedNumberList -> NamedNumber ',' NamedNumberList :['$1'|'$3'].
+
+NamedNumber -> identifier '(' SignedNumber ')' : {'NamedNumber',element(3,'$1'),'$3'}.
+NamedNumber -> identifier '(' typereference '.' identifier ')' : {'NamedNumber',element(3,'$1'),{'ExternalValue',element(3,'$3'),element(3,'$5')}}.
+NamedNumber -> identifier '(' identifier ')' : {'NamedNumber',element(3,'$1'),element(3,'$3')}.
+
+%NamedValue -> identifier Value :
+% {'NamedValue',element(2,'$1'),element(3,'$1'),'$2'}.
+
+
+SignedNumber -> number : element(3,'$1').
+SignedNumber -> '-' number : - element(3,'$1').
+
+% inlined IntegerValue -> SignedNumber :'$1'.
+% conflict moved to Value IntegerValue -> identifier:'$1'.
+
+EnumeratedType -> ENUMERATED '{' Enumeration '}' :{'ENUMERATED','$3'}.
+
+% inlined Enumerations -> Enumeration :{'$1','false',[]}.
+% inlined Enumerations -> Enumeration ',' '...' : {'$1','true',[]}.
+% inlined Enumerations -> Enumeration ',' '...' ',' Enumeration : {'$1','true','$5'}.
+
+Enumeration -> EnumerationItem :['$1'].
+% modified Enumeration -> EnumerationItem ',' Enumeration :'fix'.
+Enumeration -> EnumerationItem ',' Enumeration :['$1'|'$3'].
+
+EnumerationItem -> identifier:element(3,'$1').
+EnumerationItem -> NamedNumber :'$1'.
+EnumerationItem -> '...' :'EXTENSIONMARK'.
+
+% conflict moved to Value EnumeratedValue -> identifier:'$1'.
+
+% inlined RealType -> REAL:'REAL'.
+
+RealValue -> NumericRealValue :'$1'.
+RealValue -> SpecialRealValue:'$1'.
+
+% ?? NumericRealValue -> number:'$1'. % number MUST BE '0'
+NumericRealValue -> SAndSOfValue : '$1'. % Value of the associated sequence type
+
+SpecialRealValue -> 'PLUS-INFINITY' :'$1'.
+SpecialRealValue -> 'MINUS-INFINITY' :'$1'.
+
+BitStringType -> 'BIT' 'STRING' :{'BIT STRING',[]}.
+BitStringType -> 'BIT' 'STRING' '{' NamedNumberList '}' :{'BIT STRING','$4'}.
+% NamedBitList replaced by NamedNumberList to reduce the grammar
+% Must check later that all "numbers" are positive
+
+% inlined BitStringValue -> bstring:'$1'.
+% inlined BitStringValue -> hstring:'$1'.
+% redundant use SequenceValue BitStringValue -> '{' IdentifierList '}' :$2.
+% redundant use SequenceValue BitStringValue -> '{' '}' :'fix'.
+
+IdentifierList -> identifier :[element(3,'$1')].
+% modified IdentifierList -> IdentifierList ',' identifier :'$1'.
+IdentifierList -> identifier ',' IdentifierList :[element(3,'$1')|'$3'].
+
+% inlined OctetStringType -> 'OCTET' 'STRING' :'OCTET STRING'.
+
+% inlined OctetStringValue -> bstring:'$1'.
+% inlined OctetStringValue -> hstring:'$1'.
+
+% inlined NullType -> 'NULL':'NULL'.
+
+% inlined NullValue -> NULL:'NULL'.
+
+% result is {'SEQUENCE',ComponentTypeList}.
+SequenceType -> SEQUENCE '{' ComponentTypeList '}' :{'SEQUENCE','$3'}.
+% SequenceType -> SEQUENCE '{' ComponentTypeLists '}' :{'SEQUENCE','$3'}.
+% SequenceType -> SEQUENCE '{' ExtensionAndException '}' :{'SEQUENCE','$3'}.
+SequenceType -> SEQUENCE '{' '}' :{'SEQUENCE',[]}.
+
+% result is {RootComponentList,ExtensionAndException,AdditionalComponentTypeList}.
+%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException :{'$1','$3',[]}.
+%ComponentTypeLists -> ComponentTypeList :{'$1','false',[]}.
+%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException
+% ',' ComponentTypeList :{'$1','$3', '$5'}.
+%ComponentTypeLists -> ExtensionAndException ',' ComponentTypeList :{[],'$1','$3'}.
+
+ComponentTypeList -> ComponentType :['$1'].
+% modified below ComponentTypeList -> ComponentTypeList ',' ComponentType :'$1'.
+ComponentTypeList -> ComponentType ',' ComponentTypeList :['$1'|'$3'].
+
+% -record('ComponentType',{pos,name,typespec,prop}).
+ComponentType -> '...' ExceptionSpec :{'EXTENSIONMARK',element(2,'$1'),'$2'}.
+ComponentType -> NamedType :
+ {'NamedType',Pos,{Name,Type}} = '$1',
+ #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop=mandatory}.
+ComponentType -> NamedType 'OPTIONAL' :
+ {'NamedType',Pos,{Name,Type}} = '$1',
+ #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop='OPTIONAL'}.
+ComponentType -> NamedType 'DEFAULT' Value:
+ {'NamedType',Pos,{Name,Type}} = '$1',
+ #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop={'DEFAULT','$3'}}.
+ComponentType -> 'COMPONENTS' 'OF' Type :{'COMPONENTS OF','$3'}.
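+% Illustrative sketch of the terms the SequenceType/ComponentType rules build
+% (input assumed): "SEQUENCE { a INTEGER, b BOOLEAN OPTIONAL }" should reduce to
+%   {'SEQUENCE',[#'ComponentType'{name=a,typespec=#type{def='INTEGER'},
+%                                 prop=mandatory},
+%                #'ComponentType'{name=b,typespec=#type{def='BOOLEAN'},
+%                                 prop='OPTIONAL'}]}
+% with the pos fields taken from the identifier tokens.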
+
+% redundant ExtensionAndException -> '...' : extensionmark.
+% ExtensionAndException -> '...' ExceptionSpec : {extensionmark,'$2'}.
+
+% replaced SequenceValue -> '{' ComponentValueList '}':'$2'.
+% replaced SequenceValue -> '{' '}':[].
+
+ValueList -> Value :['$1'].
+ValueList -> NamedNumber :['$1'].
+% modified ValueList -> ValueList ',' Value :'$1'.
+ValueList -> Value ',' ValueList :['$1'|'$3'].
+ValueList -> Value ',' '...' :['$1' |[]].
+ValueList -> Value ValueList : ['$1',space|'$2'].
+ValueList -> NamedNumber ValueList: ['$1',space|'$2'].
+
+%ComponentValueList -> identifier ObjIdComponent:[{'NamedValue','$1','$2'}].
+%ComponentValueList -> NamedValue :['$1'].
+%ComponentValueList -> NamedValue ',' ComponentValueList:['$1'|'$3'].
+%ComponentValueList -> identifier ObjIdComponent ',' ComponentValueList :[{'NamedValue', '$1','$2'}|'$4'].
+
+SequenceOfType -> SEQUENCE OF Type : {'SEQUENCE OF','$3'}.
+
+% replaced SequenceOfValue with SAndSOfValue
+
+SAndSOfValue -> '{' ValueList '}' :'$2'.
+%SAndSOfValue -> '{' ComponentValueList '}' :'$2'.
+SAndSOfValue -> '{' '}' :[].
+
+% save for later SetType ->
+% result is {'SET',ComponentTypeList}.
+SetType -> SET '{' ComponentTypeList '}' :{'SET','$3'}.
+% SetType -> SET '{' ExtensionAndException '}' :{'SET','$3'}.
+SetType -> SET '{' '}' :{'SET',[]}.
+
+% replaced SetValue with SAndSOfValue
+
+SetOfType -> SET OF Type : {'SET OF','$3'}.
+
+% replaced SetOfValue with SAndSOfValue
+
+ChoiceType -> 'CHOICE' '{' ComponentTypeList '}' :{'CHOICE','$3'}.
+% AlternativeTypeList is replaced by ComponentTypeList
+ChoiceValue -> identifier ':' Value : {'ChoiceValue',element(3,'$1'),'$3'}.
+% save for later SelectionType ->
+
+TaggedType -> Tag Type : '$2'#type{tag=['$1'#tag{type={default,get(tagdefault)}}]}.
+TaggedType -> Tag IMPLICIT Type :'$3'#type{tag=['$1'#tag{type='IMPLICIT'}]}.
+TaggedType -> Tag EXPLICIT Type :'$3'#type{tag=['$1'#tag{type='EXPLICIT'}]}.
+
+Tag -> '[' Class ClassNumber ']': #tag{class='$2',number='$3'}.
+Tag -> '[' Class typereference '.' identifier ']':
+ #tag{class='$2',number=#'Externalvaluereference'{pos=element(2,'$3'),module=element(3,'$3'),
+ value=element(3,'$5')}}.
+Tag -> '[' Class number ']': #tag{class='$2',number=element(3,'$3')}.
+Tag -> '[' Class identifier ']': #tag{class='$2',number=element(3,'$3')}.
+
+ClassNumber -> number :element(3,'$1').
+% inlined above ClassNumber -> typereference '.' identifier :{'Externalvaluereference',element(3,'$1'),element(3,'$3')}.
+ClassNumber -> identifier :element(3,'$1').
+
+Class -> 'UNIVERSAL' :element(1,'$1').
+Class -> 'APPLICATION' :element(1,'$1').
+Class -> 'PRIVATE' :element(1,'$1').
+Class -> '$empty' :'CONTEXT'.
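+% Illustrative sketch (input assumed): with the TaggedType, Tag and Class rules
+% above, "[APPLICATION 3] IMPLICIT INTEGER" should yield
+%   #type{def='INTEGER',
+%         tag=[#tag{class='APPLICATION',number=3,type='IMPLICIT'}]}
+% while "[3] INTEGER" gets class 'CONTEXT' from the empty Class alternative and
+% tag type {default,get(tagdefault)}.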
+
+% conflict redundant TaggedValue -> Value:'$1'.
+
+% inlined EmbeddedPDVType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
+
+% inlined EmbeddedPDVValue -> SequenceValue:'$1'.
+
+% inlined ExternalType -> 'EXTERNAL' :'EXTERNAL'.
+
+% inlined ExternalValue -> SequenceValue :'$1'.
+
+% inlined ObjectIdentifierType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
+
+ObjectIdentifierValue -> '{' ObjIdComponentList '}' :'$2'.
+% inlined ObjectIdentifierValue -> SequenceAndSequenceOfValue :'$1'.
+% ObjectIdentifierValue -> '{' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue','$2','$3'}.
+% ObjectIdentifierValue -> '{' typereference '.' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue',{'$2','$4'},'$5'}.
+
+ObjIdComponentList -> Value:'$1'.
+ObjIdComponentList -> Value ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> DefinedValue:'$1'.
+%ObjIdComponentList -> number:'$1'.
+%ObjIdComponentList -> DefinedValue ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> number ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
+
+% redundant ObjIdComponent -> NameForm :'$1'. % expanded
+% replaced by 2 ObjIdComponent -> NumberForm :'$1'.
+% ObjIdComponent -> number :'$1'.
+% ObjIdComponent -> DefinedValue :'$1'. % means DefinedValue
+% ObjIdComponent -> NameAndNumberForm :'$1'.
+% ObjIdComponent -> NamedNumber :'$1'.
+% NamedBit replaced by NamedNumber to reduce grammar
+% must check later that "number" is positive
+
+% NameForm -> identifier:'$1'.
+
+% inlined NumberForm -> number :'$1'.
+% inlined NumberForm -> DefinedValue :'$1'.
+
+% replaced by NamedBit NameAndNumberForm -> identifier '(' NumberForm ')'.
+% NameAndNumberForm -> NamedBit:'$1'.
+
+
+CharacterStringType -> restrictedcharacterstringtype :element(3,'$1').
+CharacterStringType -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
+
+RestrictedCharacterStringValue -> cstring :element(3, '$1').
+% modified below RestrictedCharacterStringValue -> CharacterStringList :'$1'.
+% conflict vs BuiltinValue RestrictedCharacterStringValue -> SequenceAndSequenceOfValue :'$1'.
+RestrictedCharacterStringValue -> Quadruple :'$1'.
+RestrictedCharacterStringValue -> Tuple :'$1'.
+
+% redundant CharacterStringList -> '{' ValueList '}' :'$2'. % modified
+
+% redundant CharSyms -> CharsDefn :'$1'.
+% redundant CharSyms -> CharSyms ',' CharsDefn :['$1'|'$3'].
+
+% redundant CharsDefn -> cstring :'$1'.
+% temporary replaced see below CharsDefn -> DefinedValue :'$1'.
+% redundant CharsDefn -> Value :'$1'.
+
+Quadruple -> '{' number ',' number ',' number ',' number '}' :{'Quadruple','$2','$4','$6','$8'}.
+% {Group,Plane,Row,Cell}
+
+Tuple -> '{' number ',' number '}' :{'Tuple', '$2','$4'}.
+% {TableColumn,TableRow}
+
+% inlined UnrestrictedCharacterString -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
+
+CharacterStringValue -> RestrictedCharacterStringValue :'$1'.
+% conflict vs BuiltinValue CharacterStringValue -> SequenceValue :'$1'. % UnrestrictedCharacterStringValue
+
+% inlined UsefulType -> typereference :'$1'.
+
+SelectionType -> identifier '<' Type : {'SelectionType',element(3,'$1'),'$3'}.
+
+ConstrainedType -> Type Constraint :
+ '$1'#type{constraint=merge_constraints(['$2'])}.
+ConstrainedType -> Type Constraint Constraint :
+ '$1'#type{constraint=merge_constraints(['$2','$3'])}.
+ConstrainedType -> Type Constraint Constraint Constraint:
+ '$1'#type{constraint=merge_constraints(['$2','$3','$4'])}.
+ConstrainedType -> Type Constraint Constraint Constraint Constraint:
+ '$1'#type{constraint=merge_constraints(['$2','$3','$4','$5'])}.
+%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
+%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
+ConstrainedType -> TypeWithConstraint :'$1'.
+
+TypeWithConstraint -> 'SET' Constraint 'OF' Type :
+ #type{def = {'SET OF','$4'},constraint=merge_constraints(['$2'])}.
+TypeWithConstraint -> 'SET' 'SIZE' Constraint 'OF' Type :
+ #type{def = {'SET OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
+TypeWithConstraint -> 'SEQUENCE' Constraint 'OF' Type :
+ #type{def = {'SEQUENCE OF','$4'},constraint =
+ merge_constraints(['$2'])}.
+TypeWithConstraint -> 'SEQUENCE' 'SIZE' Constraint 'OF' Type :
+ #type{def = {'SEQUENCE OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
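+% Illustrative sketch (input assumed): together with merge_constraints/1 from
+% the "Erlang code" section below, "SET SIZE (1..10) OF INTEGER" should become
+%   #type{def={'SET OF',#type{def='INTEGER'}},
+%         constraint=[{'SizeConstraint',{1,10}}]}
+% since fixup_size_constraint/1 flattens {'ValueRange',{1,10}} to {1,10}.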
+
+
+Constraint -> '(' ConstraintSpec ExceptionSpec ')' :
+ #constraint{c='$2',e='$3'}.
+
+% inlined Constraint -> SubTypeConstraint :'$1'.
+ConstraintSpec -> ElementSetSpecs :'$1'.
+ConstraintSpec -> UserDefinedConstraint :'$1'.
+ConstraintSpec -> TableConstraint :'$1'.
+
+TableConstraint -> ComponentRelationConstraint : '$1'.
+TableConstraint -> ObjectSet : '$1'.
+%TableConstraint -> '{' typereference '}' :tableconstraint.
+
+ComponentRelationConstraint -> '{' typereference '}' '{' '@' ComponentIdList '}' : componentrelation.
+ComponentRelationConstraint -> '{' typereference '}' '{' '@' '.' ComponentIdList '}' : componentrelation.
+
+ComponentIdList -> identifier: ['$1'].
+ComponentIdList -> identifier '.' ComponentIdList: ['$1'| '$3'].
+
+
+% later ConstraintSpec -> GeneralConstraint :'$1'.
+
+% from X.682
+UserDefinedConstraint -> 'CONSTRAINED' 'BY' '{' '}' : {constrained_by,[]}.
+UserDefinedConstraint -> 'CONSTRAINED' 'BY'
+ '{' UserDefinedConstraintParameters '}' : {constrained_by,'$4'}.
+
+UserDefinedConstraintParameters -> UserDefinedConstraintParameter : ['$1'].
+UserDefinedConstraintParameters ->
+ UserDefinedConstraintParameter ','
+ UserDefinedConstraintParameters: ['$1'|'$3'].
+
+UserDefinedConstraintParameter -> Type '.' ActualParameter : {'$1','$3'}.
+UserDefinedConstraintParameter -> ActualParameter : '$1'.
+
+
+
+ExceptionSpec -> '!' ExceptionIdentification : '$1'.
+ExceptionSpec -> '$empty' : undefined.
+
+ExceptionIdentification -> SignedNumber : '$1'.
+% inlined ExceptionIdentification -> DefinedValue : '$1'.
+ExceptionIdentification -> typereference '.' identifier :
+ #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
+ value=element(3,'$1')}.
+ExceptionIdentification -> identifier :'$1'.
+ExceptionIdentification -> Type ':' Value : {'$1','$3'}.
+
+% inlined SubTypeConstraint -> ElementSetSpec
+
+ElementSetSpecs -> ElementSetSpec : '$1'.
+ElementSetSpecs -> ElementSetSpec ',' '...': {'$1',[]}.
+ElementSetSpecs -> '...' ',' ElementSetSpec : {[],'$3'}.
+ElementSetSpecs -> ElementSetSpec ',' '...' ',' ElementSetSpec : {'$1','$5'}.
+
+ElementSetSpec -> Unions : '$1'.
+ElementSetSpec -> 'ALL' Exclusions : {'ALL','$2'}.
+
+Unions -> Intersections : '$1'.
+Unions -> UElems UnionMark IntersectionElements :
+ case {'$1','$3'} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {'SingleValue',ordsets:union(to_set(V1),to_set(V2))}
+ end.
+
+UElems -> Unions :'$1'.
+
+Intersections -> IntersectionElements :'$1'.
+Intersections -> IElems IntersectionMark IntersectionElements :
+ case {'$1','$3'} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {'SingleValue',ordsets:intersection(to_set(V1),to_set(V2))};
+ {V1,V2} when list(V1) ->
+ V1 ++ [V2];
+ {V1,V2} ->
+ [V1,V2]
+ end.
+%Intersections -> IElems '^' IntersectionElements :{'INTERSECTION','$1','$3'}.
+%Intersections -> IElems 'INTERSECTION' IntersectionElements :{'INTERSECTION','$1','$3'}.
+
+IElems -> Intersections :'$1'.
+
+IntersectionElements -> Elements :'$1'.
+IntersectionElements -> Elems Exclusions :{'$1','$2'}.
+
+Elems -> Elements :'$1'.
+
+Exclusions -> 'EXCEPT' Elements :{'EXCEPT','$2'}.
+
+IntersectionMark -> 'INTERSECTION':'$1'.
+IntersectionMark -> '^':'$1'.
+UnionMark -> 'UNION':'$1'.
+UnionMark -> '|':'$1'.
+
+
+Elements -> SubTypeElements : '$1'.
+%Elements -> ObjectSetElements : '$1'.
+Elements -> '(' ElementSetSpec ')' : '$2'.
+Elements -> ReferencedType : '$1'.
+
+SubTypeElements -> ValueList : {'SingleValue','$1'}. % NOTE it must be a Value
+% The rule above is modified only because of conflicts
+SubTypeElements -> 'INCLUDES' Type : {'ContainedSubType','$2'}.
+%not lalr1 if this is activated SubTypeElements -> Type : {'TypeConstraint','$1'}.
+SubTypeElements -> LowerEndpoint '..' UpperEndpoint : {'ValueRange',{'$1','$3'}}.
+SubTypeElements -> 'FROM' Constraint : {'PermittedAlphabet','$2'#constraint.c}.
+SubTypeElements -> 'SIZE' Constraint: {'SizeConstraint','$2'#constraint.c}.
+% later will introduce conflicts related to NULL SubTypeElements -> Type : {'TypeConstraint','$1'}.
+SubTypeElements -> 'WITH' 'COMPONENT' Constraint:{'WITH COMPONENT','$3'}.
+SubTypeElements -> 'WITH' 'COMPONENTS' '{' TypeConstraints '}':{'WITH COMPONENTS',{'FullSpecification','$4'}}.
+SubTypeElements -> 'WITH' 'COMPONENTS' '{' '...' ',' TypeConstraints '}' :{'WITH COMPONENTS',{'PartialSpecification','$6'}}.
+
+% inlined above InnerTypeConstraints ::=
+% inlined above SingleTypeConstraint::= Constraint
+% inlined above MultipleTypeConstraints ::= FullSpecification | PartialSpecification
+% inlined above FullSpecification ::= "{" TypeConstraints "}"
+% inlined above PartialSpecification ::= "{" "..." "," TypeConstraints "}"
+% TypeConstraints -> identifier : [{'NamedConstraint',element(3,'$1'),undefined,undefined}]. % is this really meaningful or allowed
+TypeConstraints -> NamedConstraint : ['$1'].
+TypeConstraints -> NamedConstraint ',' TypeConstraints : ['$1'|'$3'].
+TypeConstraints -> identifier : ['$1'].
+TypeConstraints -> identifier ',' TypeConstraints : ['$1'|'$3'].
+
+NamedConstraint -> identifier Constraint PresenceConstraint :{'NamedConstraint',element(3,'$1'),'$2','$3'}.
+NamedConstraint -> identifier Constraint :{'NamedConstraint',element(3,'$1'),'$2',undefined}.
+NamedConstraint -> identifier PresenceConstraint :{'NamedConstraint',element(3,'$1'),undefined,'$2'}.
+
+PresenceConstraint -> 'PRESENT' : 'PRESENT'.
+PresenceConstraint -> 'ABSENT' : 'ABSENT'.
+PresenceConstraint -> 'OPTIONAL' : 'OPTIONAL'.
+
+
+
+LowerEndpoint -> LowerEndValue :'$1'.
+%LowerEndpoint -> LowerEndValue '<':{gt,'$1'}.
+LowerEndpoint -> LowerEndValue '<':('$1'+1).
+
+UpperEndpoint -> UpperEndValue :'$1'.
+%UpperEndpoint -> '<' UpperEndValue :{lt,'$2'}.
+UpperEndpoint -> '<' UpperEndValue :('$2'-1).
+
+LowerEndValue -> Value :'$1'.
+LowerEndValue -> 'MIN' :'MIN'.
+
+UpperEndValue -> Value :'$1'.
+UpperEndValue -> 'MAX' :'MAX'.
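+% Note that exclusive endpoints are folded into the bound itself: an (assumed)
+% range constraint written "1<..10" reduces to {'ValueRange',{2,10}} via the
+% SubTypeElements rule above.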
+
+
+% X.681
+
+
+% X.681 chap 15
+
+%TypeFromObject -> ReferencedObjects '.' FieldName : {'$1','$3'}.
+TypeFromObject -> typereference '.' FieldName : {'$1','$3'}.
+
+ReferencedObjects -> typereference : '$1'.
+%ReferencedObjects -> ParameterizedObject
+%ReferencedObjects -> DefinedObjectSet
+%ReferencedObjects -> ParameterizedObjectSet
+
+FieldName -> typefieldreference : ['$1'].
+FieldName -> valuefieldreference : ['$1'].
+FieldName -> FieldName '.' FieldName : ['$1' | '$3'].
+
+PrimitiveFieldName -> typefieldreference : '$1'.
+PrimitiveFieldName -> valuefieldreference : '$1'.
+
+%ObjectSetAssignment -> typereference DefinedObjectClass '::=' ObjectSet: null.
+ObjectSetAssignment -> typereference typereference '::=' ObjectSet :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'ObjectSet',element(3,'$2'), '$4'}}.
+ObjectSetAssignment -> typereference typereference '.' typereference '::=' ObjectSet.
+
+ObjectSet -> '{' ElementSetSpecs '}' : '$2'.
+ObjectSet -> '{' '...' '}' : ['EXTENSIONMARK'].
+
+%ObjectSetElements -> Object.
+% ObjectSetElements -> identifier : '$1'.
+%ObjectSetElements -> DefinedObjectSet.
+%ObjectSetElements -> ObjectSetFromObjects.
+%ObjectSetElements -> ParameterizedObjectSet.
+
+%ObjectAssignment -> identifier DefinedObjectClass '::=' Object.
+ObjectAssignment -> ValueAssignment.
+%ObjectAssignment -> identifier typereference '::=' Object.
+%ObjectAssignment -> identifier typereference '.' typereference '::=' Object.
+
+%Object -> DefinedObject: '$1'.
+%Object -> ExternalObjectReference: '$1'.%Object -> DefinedObject: '$1'.
+Object -> typereference '.' identifier: '$1'.%Object -> DefinedObject: '$1'.
+Object -> identifier: '$1'.%Object -> DefinedObject: '$1'.
+
+%Object -> ObjectDefn -> DefaultSyntax: '$1'.
+Object -> '{' FieldSetting ',' FieldSettings '}' : ['$2'|'$4'].
+Object -> '{' FieldSetting '}' :['$2'].
+
+%% For User-friendly notation
+%% Object -> ObjectDefn -> DefinedSyntax
+Object -> '{' '}'.
+Object -> '{' DefinedSyntaxTokens '}'.
+
+% later Object -> ParameterizedObject: '$1'. look in x.683
+
+%DefinedObject -> ExternalObjectReference: '$1'.
+%DefinedObject -> identifier: '$1'.
+
+DefinedObjectClass -> typereference.
+%DefinedObjectClass -> objectclassreference.
+DefinedObjectClass -> ExternalObjectClassReference.
+%DefinedObjectClass -> typereference '.' objectclassreference.
+%%DefinedObjectClass -> UsefulObjectClassReference.
+
+ExternalObjectReference -> typereference '.' identifier.
+ExternalObjectClassReference -> typereference '.' typereference.
+%%ExternalObjectClassReference -> typereference '.' objectclassreference.
+
+ObjectDefn -> DefaultSyntax: '$1'.
+%ObjectDefn -> DefinedSyntax: '$1'.
+
+ObjectFromObject -> ReferencedObjects '.' FieldName : {'ObjectFromObject','$1','$3'}.
+
+% later look in x.683 ParameterizedObject ->
+
+%DefaultSyntax -> '{' '}'.
+%DefaultSyntax -> '{' FieldSettings '}': '$2'.
+DefaultSyntax -> '{' FieldSetting ',' FieldSettings '}': '$2'.
+DefaultSyntax -> '{' FieldSetting '}': '$2'.
+
+FieldSetting -> PrimitiveFieldName Setting: {'$1','$2'}.
+
+FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
+FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
+FieldSettings -> FieldSetting: '$1'.
+
+%DefinedSyntax -> '{' '}'.
+DefinedSyntax -> '{' DefinedSyntaxTokens '}': '$2'.
+
+DefinedSyntaxTokens -> DefinedSyntaxToken: '$1'.
+DefinedSyntaxTokens -> DefinedSyntaxToken DefinedSyntaxTokens: ['$1'|'$2'].
+
+% expanded DefinedSyntaxToken -> Literal: '$1'.
+%DefinedSyntaxToken -> typereference: '$1'.
+DefinedSyntaxToken -> word: '$1'.
+DefinedSyntaxToken -> ',': '$1'.
+DefinedSyntaxToken -> Setting: '$1'.
+%DefinedSyntaxToken -> '$empty': nil .
+
+% Setting ::= Type|Value|ValueSet|Object|ObjectSet
+Setting -> Type: '$1'.
+%Setting -> Value: '$1'.
+%Setting -> ValueNotNull: '$1'.
+Setting -> BuiltinValue: '$1'.
+Setting -> ValueSet: '$1'.
+%Setting -> Object: '$1'.
+%Setting -> ExternalObjectReference.
+Setting -> typereference '.' identifier.
+Setting -> identifier.
+Setting -> ObjectDefn.
+
+Setting -> ObjectSet: '$1'.
+
+
+Erlang code.
+%%-author('[email protected]').
+-copyright('Copyright (c) 1991-99 Ericsson Telecom AB').
+-vsn('$Revision: 1.1 $').
+-include("asn1_records.hrl").
+
+to_set(V) when list(V) ->
+ ordsets:list_to_set(V);
+to_set(V) ->
+ ordsets:list_to_set([V]).
+
+merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
+ {merge_constraints(Rlist,[],[]),
+ merge_constraints(ExtList,[],[])};
+
+merge_constraints(Clist) ->
+ merge_constraints(Clist, [], []).
+
+merge_constraints([Ch|Ct],Cacc, Eacc) ->
+ NewEacc = case Ch#constraint.e of
+ undefined -> Eacc;
+ E -> [E|Eacc]
+ end,
+ merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
+
+merge_constraints([],Cacc,[]) ->
+ lists:flatten(Cacc);
+merge_constraints([],Cacc,Eacc) ->
+ lists:flatten(Cacc) ++ [{'Errors',Eacc}].
+
+fixup_constraint(C) ->
+ case C of
+ {'SingleValue',V} when list(V) ->
+ [C,
+ {'ValueRange',{lists:min(V),lists:max(V)}}];
+ {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
+ V2 = {'SingleValue',
+ ordsets:list_to_set(lists:flatten(V))},
+ {'PermittedAlphabet',V2};
+ {'PermittedAlphabet',{'SingleValue',V}} ->
+ V2 = {'SingleValue',[V]},
+ {'PermittedAlphabet',V2};
+ {'SizeConstraint',Sc} ->
+ {'SizeConstraint',fixup_size_constraint(Sc)};
+
+ List when list(List) ->
+ [fixup_constraint(Xc)||Xc <- List];
+ Other ->
+ Other
+ end.
+
+fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
+ {Lb,Ub};
+fixup_size_constraint({{'ValueRange',R},[]}) ->
+ {R,[]};
+fixup_size_constraint({[],{'ValueRange',R}}) ->
+ {[],R};
+fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
+ {R1,R2};
+fixup_size_constraint({'SingleValue',[Sv]}) ->
+ fixup_size_constraint({'SingleValue',Sv});
+fixup_size_constraint({'SingleValue',L}) when list(L) ->
+ ordsets:list_to_set(L);
+fixup_size_constraint({'SingleValue',L}) ->
+ {L,L};
+fixup_size_constraint({C1,C2}) ->
+ {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
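+% A small worked example of the helpers above (input assumed): a single-value
+% constraint such as "(2|4|6)" reaches merge_constraints/1 as
+%   [#constraint{c={'SingleValue',[2,4,6]},e=undefined}]
+% and the call returns
+%   [{'SingleValue',[2,4,6]},{'ValueRange',{2,6}}]
+% because fixup_constraint/1 adds the min/max value range for list values.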
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser2.erl
new file mode 100644
index 0000000000..639dcc6622
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_parser2.erl
@@ -0,0 +1,2764 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 2000, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_parser2.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_parser2).
+
+-export([parse/1]).
+-include("asn1_records.hrl").
+
+%% parse all types in module
+parse(Tokens) ->
+ case catch parse_ModuleDefinition(Tokens) of
+ {'EXIT',Reason} ->
+ {error,{{undefined,get(asn1_module),
+ [internal,error,'when',parsing,module,definition,Reason]},
+ hd(Tokens)}};
+ {asn1_error,Reason} ->
+ {error,{Reason,hd(Tokens)}};
+ {ModuleDefinition,Rest1} ->
+ {Types,Rest2} = parse_AssignmentList(Rest1),
+ case Rest2 of
+ [{'END',_}|_Rest3] ->
+ {ok,ModuleDefinition#module{typeorval = Types}};
+ _ ->
+ {error,{{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'END']},
+ hd(Rest2)}}
+ end
+ end.
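+%% Minimal usage sketch (token list assumed, as produced by the ASN.1
+%% tokenizer): parsing an empty module "M DEFINITIONS ::= BEGIN END" amounts to
+%%   parse([{typereference,1,'M'},{'DEFINITIONS',1},{'::=',1},
+%%          {'BEGIN',1},{'END',1},{'$end',2}])
+%% which should return {ok,#module{name='M',tagdefault='EXPLICIT',
+%%                                 exports={exports,all},imports={imports,[]},
+%%                                 typeorval=[]}}.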
+
+parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
+ put(asn1_module,ModuleIdentifier),
+ {_DefinitiveIdentifier,Rest02} =
+ case Rest0 of
+ [{'{',_}|_Rest01] ->
+ parse_ObjectIdentifierValue(Rest0);
+ _ ->
+ {[],Rest0}
+ end,
+ Rest = case Rest02 of
+ [{'DEFINITIONS',_}|Rest03] ->
+ Rest03;
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest02)),get(asn1_module),
+ [got,get_token(hd(Rest02)),
+ expected,'DEFINITIONS']}})
+ end,
+ {TagDefault,Rest2} =
+ case Rest of
+ [{'EXPLICIT',_L3},{'TAGS',_L4}|Rest1] ->
+ put(tagdefault,'EXPLICIT'), {'EXPLICIT',Rest1};
+ [{'IMPLICIT',_L3},{'TAGS',_L4}|Rest1] ->
+ put(tagdefault,'IMPLICIT'), {'IMPLICIT',Rest1};
+ [{'AUTOMATIC',_L3},{'TAGS',_L4}|Rest1] ->
+ put(tagdefault,'AUTOMATIC'), {'AUTOMATIC',Rest1};
+ Rest1 ->
+ put(tagdefault,'EXPLICIT'), {'EXPLICIT',Rest1} % The default
+ end,
+ {ExtensionDefault,Rest3} =
+ case Rest2 of
+ [{'EXTENSIBILITY',_L5}, {'IMPLIED',_L6}|Rest21] ->
+ {'IMPLIED',Rest21};
+ _ -> {false,Rest2}
+ end,
+ case Rest3 of
+ [{'::=',_L7}, {'BEGIN',_L8}|Rest4] ->
+ {Exports, Rest5} = parse_Exports(Rest4),
+ {Imports, Rest6} = parse_Imports(Rest5),
+ {#module{ pos = L1,
+ name = ModuleIdentifier,
+ defid = [], % fix this
+ tagdefault = TagDefault,
+ extensiondefault = ExtensionDefault,
+ exports = Exports,
+ imports = Imports},Rest6};
+ _ -> throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,"::= BEGIN"]}})
+ end;
+parse_ModuleDefinition(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typereference]}}).
+
+parse_Exports([{'EXPORTS',_L1},{';',_L2}|Rest]) ->
+ {{exports,[]},Rest};
+parse_Exports([{'EXPORTS',_L1}|Rest]) ->
+ {SymbolList,Rest2} = parse_SymbolList(Rest),
+ case Rest2 of
+ [{';',_}|Rest3] ->
+ {{exports,SymbolList},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,';']}})
+ end;
+parse_Exports(Rest) ->
+ {{exports,all},Rest}.
+
+parse_SymbolList(Tokens) ->
+ parse_SymbolList(Tokens,[]).
+
+parse_SymbolList(Tokens,Acc) ->
+ {Symbol,Rest} = parse_Symbol(Tokens),
+ case Rest of
+ [{',',_L1}|Rest2] ->
+ parse_SymbolList(Rest2,[Symbol|Acc]);
+ Rest2 ->
+ {lists:reverse([Symbol|Acc]),Rest2}
+ end.
+
+parse_Symbol(Tokens) ->
+ parse_Reference(Tokens).
+
+parse_Reference([{typereference,L1,TrefName},{'{',_L2},{'}',_L3}|Rest]) ->
+% {Tref,Rest};
+ {tref2Exttref(L1,TrefName),Rest};
+parse_Reference([Tref1 = {typereference,_,_},{'.',_},Tref2 = {typereference,_,_},
+ {'{',_L2},{'}',_L3}|Rest]) ->
+% {{Tref1,Tref2},Rest};
+ {{tref2Exttref(Tref1),tref2Exttref(Tref2)},Rest};
+parse_Reference([Tref = {typereference,_L1,_TrefName}|Rest]) ->
+ {tref2Exttref(Tref),Rest};
+parse_Reference([Vref = {identifier,_L1,_VName}|Rest]) ->
+ {identifier2Extvalueref(Vref),Rest};
+parse_Reference(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,identifier]]}}).
+
+parse_Imports([{'IMPORTS',_L1},{';',_L2}|Rest]) ->
+ {{imports,[]},Rest};
+parse_Imports([{'IMPORTS',_L1}|Rest]) ->
+ {SymbolsFromModuleList,Rest2} = parse_SymbolsFromModuleList(Rest),
+ case Rest2 of
+ [{';',_L2}|Rest3] ->
+ {{imports,SymbolsFromModuleList},Rest3};
+ Rest3 ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,';']}})
+ end;
+parse_Imports(Tokens) ->
+ {{imports,[]},Tokens}.
+
+parse_SymbolsFromModuleList(Tokens) ->
+ parse_SymbolsFromModuleList(Tokens,[]).
+
+parse_SymbolsFromModuleList(Tokens,Acc) ->
+ {SymbolsFromModule,Rest} = parse_SymbolsFromModule(Tokens),
+ case (catch parse_SymbolsFromModule(Rest)) of
+ {Sl,_Rest2} when record(Sl,'SymbolsFromModule') ->
+ parse_SymbolsFromModuleList(Rest,[SymbolsFromModule|Acc]);
+ _ ->
+ {lists:reverse([SymbolsFromModule|Acc]),Rest}
+ end.
+
+parse_SymbolsFromModule(Tokens) ->
+ SetRefModuleName =
+ fun(N) ->
+ fun(X) when record(X,'Externaltypereference')->
+ X#'Externaltypereference'{module=N};
+ (X) when record(X,'Externalvaluereference')->
+ X#'Externalvaluereference'{module=N}
+ end
+ end,
+ {SymbolList,Rest} = parse_SymbolList(Tokens),
+ case Rest of
+ %%How does this case correspond to x.680 ?
+ [{'FROM',_L1},Tref = {typereference,_,_},Ref={identifier,_L2,_Id},C={',',_}|Rest2] ->
+ {#'SymbolsFromModule'{symbols=SymbolList,
+ module=tref2Exttref(Tref)},[Ref,C|Rest2]};
+ %%How does this case correspond to x.680 ?
+ [{'FROM',_L1},Tref = {typereference,_,_},{identifier,_L2,_Id}|Rest2] ->
+ {#'SymbolsFromModule'{symbols=SymbolList,
+ module=tref2Exttref(Tref)},Rest2};
+ [{'FROM',_L1},Tref = {typereference,_,Name},Brace = {'{',_}|Rest2] ->
+ {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue([Brace|Rest2]), % value not used yet, fix me
+ NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ {#'SymbolsFromModule'{symbols=NewSymbolList,
+ module=tref2Exttref(Tref)},Rest3};
+ [{'FROM',_L1},Tref = {typereference,_,Name}|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ {#'SymbolsFromModule'{symbols=NewSymbolList,
+ module=tref2Exttref(Tref)},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,
+				       ['FROM typereference identifier ,',
+ 'FROM typereference identifier',
+ 'FROM typereference {',
+ 'FROM typereference']]}})
+ end.
+
+parse_ObjectIdentifierValue([{'{',_}|Rest]) ->
+ parse_ObjectIdentifierValue(Rest,[]).
+
+parse_ObjectIdentifierValue([{number,_,Num}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[Num|Acc]);
+parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {number,_,Num}, {')',_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Num}|Acc]);
+parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {identifier,_,Id2}, {')',_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Id2}|Acc]);
+parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {typereference,_,Tref},{'.',_},{identifier,_,Id2}, {')',_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
+parse_ObjectIdentifierValue([Id = {identifier,_,_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[identifier2Extvalueref(Id)|Acc]);
+parse_ObjectIdentifierValue([{'}',_}|Rest],Acc) ->
+ {lists:reverse(Acc),Rest};
+parse_ObjectIdentifierValue([H|_T],_Acc) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,
+ ['{ some of the following }',number,'identifier ( number )',
+ 'identifier ( identifier )',
+ 'identifier ( typereference.identifier)',identifier]]}}).
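+%% Illustrative sketch (tokens assumed): an OBJECT IDENTIFIER value written as
+%% "{ iso(1) member-body(2) 840 }" is accumulated by the clauses above into
+%%   [{'NamedNumber',iso,1},{'NamedNumber','member-body',2},840]
+%% in source order, thanks to the final lists:reverse/1.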
+
+parse_AssignmentList(Tokens = [{'END',_}|_Rest]) ->
+ {[],Tokens};
+parse_AssignmentList(Tokens = [{'$end',_}|_Rest]) ->
+ {[],Tokens};
+parse_AssignmentList(Tokens) ->
+ parse_AssignmentList(Tokens,[]).
+
+parse_AssignmentList(Tokens= [{'END',_}|_Rest],Acc) ->
+ {lists:reverse(Acc),Tokens};
+parse_AssignmentList(Tokens= [{'$end',_}|_Rest],Acc) ->
+ {lists:reverse(Acc),Tokens};
+parse_AssignmentList(Tokens,Acc) ->
+ case (catch parse_Assignment(Tokens)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,R} ->
+% [H|T] = Tokens,
+ throw({error,{R,hd(Tokens)}});
+ {Assignment,Rest} ->
+ parse_AssignmentList(Rest,[Assignment|Acc])
+ end.
+
+parse_Assignment(Tokens) ->
+ Flist = [fun parse_TypeAssignment/1,
+ fun parse_ValueAssignment/1,
+ fun parse_ObjectClassAssignment/1,
+ fun parse_ObjectAssignment/1,
+ fun parse_ObjectSetAssignment/1,
+ fun parse_ParameterizedAssignment/1,
+ fun parse_ValueSetTypeAssignment/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {asn1_assignment_error,Reason} ->
+ throw({asn1_error,Reason});
+ Result ->
+ Result
+ end.
+
+
+parse_or(Tokens,Flist) ->
+ parse_or(Tokens,Flist,[]).
+
+parse_or(_Tokens,[],ErrList) ->
+ case ErrList of
+ [] ->
+ throw({asn1_error,{parse_or,ErrList}});
+ L when list(L) ->
+%%% throw({asn1_error,{parse_or,hd(lists:reverse(ErrList))}});
+ %% chose to throw 1) the error with the highest line no,
+	    %% 2) the last error which is not an asn1_assignment_error or
+ %% 3) the last error.
+ throw(prioritize_error(ErrList));
+ Other ->
+ throw({asn1_error,{parse_or,Other}})
+ end;
+parse_or(Tokens,[Fun|Frest],ErrList) ->
+ case (catch Fun(Tokens)) of
+ Exit = {'EXIT',_Reason} ->
+ parse_or(Tokens,Frest,[Exit|ErrList]);
+ AsnErr = {asn1_error,_} ->
+ parse_or(Tokens,Frest,[AsnErr|ErrList]);
+ AsnAssErr = {asn1_assignment_error,_} ->
+ parse_or(Tokens,Frest,[AsnAssErr|ErrList]);
+ Result = {_,L} when list(L) ->
+ Result;
+% Result ->
+% Result
+ Error ->
+ parse_or(Tokens,Frest,[Error|ErrList])
+ end.
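+%% Sketch of the strategy above: parse_or/2 tries each parser fun in Flist from
+%% left to right and keeps the first result of the form {ParseTree,RemainingTokens}
+%% where the remaining tokens form a list; every failing alternative is collected
+%% in ErrList, from which prioritize_error/1 (defined further down, assumed)
+%% picks the error that is finally thrown.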
+
+parse_TypeAssignment([{typereference,L1,Tref},{'::=',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#typedef{pos=L1,name=Tref,typespec=Type},Rest2};
+parse_TypeAssignment([H1,H2|_Rest]) ->
+ throw({asn1_assignment_error,{get_line(H1),get(asn1_module),
+ [got,[get_token(H1),get_token(H2)], expected,
+ typereference,'::=']}});
+parse_TypeAssignment([H|_T]) ->
+ throw({asn1_assignment_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,
+ typereference]}}).
+
+parse_Type(Tokens) ->
+ {Tag,Rest3} = case Tokens of
+ [Lbr= {'[',_}|Rest] ->
+ parse_Tag([Lbr|Rest]);
+ Rest-> {[],Rest}
+ end,
+ {Tag2,Rest4} = case Rest3 of
+ [{'IMPLICIT',_}|Rest31] when record(Tag,tag)->
+ {[Tag#tag{type='IMPLICIT'}],Rest31};
+ [{'EXPLICIT',_}|Rest31] when record(Tag,tag)->
+ {[Tag#tag{type='EXPLICIT'}],Rest31};
+ Rest31 when record(Tag,tag) ->
+ {[Tag#tag{type={default,get(tagdefault)}}],Rest31};
+ Rest31 ->
+ {Tag,Rest31}
+ end,
+ Flist = [fun parse_BuiltinType/1,fun parse_ReferencedType/1,fun parse_TypeWithConstraint/1],
+ {Type,Rest5} = case (catch parse_or(Rest4,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_Reason} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end,
+ case hd(Rest5) of
+ {'(',_} ->
+ {Constraints,Rest6} = parse_Constraints(Rest5),
+ if record(Type,type) ->
+ {Type#type{constraint=merge_constraints(Constraints),
+ tag=Tag2},Rest6};
+ true ->
+ {#type{def=Type,constraint=merge_constraints(Constraints),
+ tag=Tag2},Rest6}
+ end;
+ _ ->
+ if record(Type,type) ->
+ {Type#type{tag=Tag2},Rest5};
+ true ->
+ {#type{def=Type,tag=Tag2},Rest5}
+ end
+ end.
+
+parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
+ case Rest of
+ [{'{',_}|Rest2] ->
+ {NamedNumberList,Rest3} = parse_NamedNumberList(Rest2),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def={'BIT STRING',NamedNumberList}},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,'}']}})
+ end;
+ _ ->
+ {{'BIT STRING',[]},Rest}
+ end;
+parse_BuiltinType([{'BOOLEAN',_}|Rest]) ->
+ {#type{def='BOOLEAN'},Rest};
+%% CharacterStringType ::= RestrictedCharacterStringType |
+%% UnrestrictedCharacterStringType
+parse_BuiltinType([{restrictedcharacterstringtype,_,StringName}|Rest]) ->
+ {#type{def=StringName},Rest};
+parse_BuiltinType([{'CHARACTER',_},{'STRING',_}|Rest]) ->
+ {#type{def='CHARACTER STRING'},Rest};
+
+parse_BuiltinType([{'CHOICE',_},{'{',_}|Rest]) ->
+ {AlternativeTypeLists,Rest2} = parse_AlternativeTypeLists(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def={'CHOICE',AlternativeTypeLists}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'EMBEDDED',_},{'PDV',_}|Rest]) ->
+ {#type{def='EMBEDDED PDV'},Rest};
+parse_BuiltinType([{'ENUMERATED',_},{'{',_}|Rest]) ->
+ {Enumerations,Rest2} = parse_Enumerations(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def={'ENUMERATED',Enumerations}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'EXTERNAL',_}|Rest]) ->
+ {#type{def='EXTERNAL'},Rest};
+
+% InstanceOfType
+parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
+ {DefinedObjectClass,Rest2} = parse_DefinedObjectClass(Rest),
+ case Rest2 of
+ [{'(',_}|_] ->
+ {Constraint,Rest3} = parse_Constraint(Rest2),
+ {#type{def={'INSTANCE OF',DefinedObjectClass,Constraint}},Rest3};
+ _ ->
+ {#type{def={'INSTANCE OF',DefinedObjectClass,[]}},Rest2}
+ end;
+
+% parse_BuiltinType(Tokens) ->
+
+parse_BuiltinType([{'INTEGER',_}|Rest]) ->
+ case Rest of
+ [{'{',_}|Rest2] ->
+ {NamedNumberList,Rest3} = parse_NamedNumberList(Rest2),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def={'INTEGER',NamedNumberList}},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,'}']}})
+ end;
+ _ ->
+ {#type{def='INTEGER'},Rest}
+ end;
+parse_BuiltinType([{'NULL',_}|Rest]) ->
+ {#type{def='NULL'},Rest};
+
+% ObjectClassFieldType fix me later
+
+parse_BuiltinType([{'OBJECT',_},{'IDENTIFIER',_}|Rest]) ->
+ {#type{def='OBJECT IDENTIFIER'},Rest};
+parse_BuiltinType([{'OCTET',_},{'STRING',_}|Rest]) ->
+ {#type{def='OCTET STRING'},Rest};
+parse_BuiltinType([{'REAL',_}|Rest]) ->
+ {#type{def='REAL'},Rest};
+parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
+ {#type{def=#'SEQUENCE'{components=[{'EXTENSIONMARK',Line,undefined}]}},
+ Rest};
+parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
+ {ExceptionIdentification,Rest2} = parse_ExceptionIdentification(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SEQUENCE'{components=[{'EXTENSIONMARK',
+ Line,
+ ExceptionIdentification}]}},
+ Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
+ {ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#type{def={'SEQUENCE OF',Type}},Rest2};
+
+
+parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
+ {#type{def=#'SET'{components=[{'EXTENSIONMARK',Line,undefined}]}},Rest};
+parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
+ {ExceptionIdentification,Rest2} = parse_ExceptionIdentification(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SET'{components=
+ [{'EXTENSIONMARK',Line,ExceptionIdentification}]}},
+ Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
+ {ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SET'{components=ComponentTypeLists}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SET',_},{'OF',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#type{def={'SET OF',Type}},Rest2};
+
+%% The so-called Useful types
+parse_BuiltinType([{'GeneralizedTime',_}|Rest]) ->
+ {#type{def='GeneralizedTime'},Rest};
+parse_BuiltinType([{'UTCTime',_}|Rest]) ->
+ {#type{def='UTCTime'},Rest};
+parse_BuiltinType([{'ObjectDescriptor',_}|Rest]) ->
+ {#type{def='ObjectDescriptor'},Rest};
+
+%% For compatibility with old standard
+parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},{identifier,_,Id}|Rest]) ->
+ {#type{def={'ANY_DEFINED_BY',Id}},Rest};
+parse_BuiltinType([{'ANY',_}|Rest]) ->
+ {#type{def='ANY'},Rest};
+
+parse_BuiltinType(Tokens) ->
+ parse_ObjectClassFieldType(Tokens).
+% throw({asn1_error,unhandled_type}).
+
+
+parse_TypeWithConstraint([{'SEQUENCE',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ Constraint2 =
+ case Constraint of
+ #constraint{c=C} ->
+ Constraint#constraint{c={'SizeConstraint',C}};
+ _ -> Constraint
+ end,
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint2])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint([{'SET',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SET OF',Type}, constraint = merge_constraints([Constraint])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint([{'SET',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ Constraint2 =
+ case Constraint of
+ #constraint{c=C} ->
+ Constraint#constraint{c={'SizeConstraint',C}};
+ _ -> Constraint
+ end,
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SET OF',Type}, constraint = merge_constraints([Constraint2])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['SEQUENCE','SEQUENCE SIZE','SET','SET SIZE'],
+ followed,by,a,constraint]}}).
+
+
+%% --------------------------
+
+parse_ReferencedType(Tokens) ->
+ Flist = [fun parse_DefinedType/1,
+ fun parse_SelectionType/1,
+ fun parse_TypeFromObject/1,
+ fun parse_ValueSetFromObjects/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_DefinedType(Tokens=[{typereference,_,_},{'{',_}|_Rest]) ->
+ parse_ParameterizedType(Tokens);
+parse_DefinedType(Tokens=[{typereference,L1,TypeName},
+ T2={typereference,_,_},T3={'{',_}|Rest]) ->
+ case (catch parse_ParameterizedType(Tokens)) of
+ {'EXIT',_Reason} ->
+ Rest2 = [T2,T3|Rest],
+ {#type{def = #'Externaltypereference'{pos=L1,
+ module=get(asn1_module),
+ type=TypeName}},Rest2};
+ {asn1_error,_} ->
+ Rest2 = [T2,T3|Rest],
+ {#type{def = #'Externaltypereference'{pos=L1,
+ module=get(asn1_module),
+ type=TypeName}},Rest2};
+ Result ->
+ Result
+ end;
+parse_DefinedType([{typereference,L1,Module},{'.',_},{typereference,_,TypeName}|Rest]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=Module,type=TypeName}},Rest};
+parse_DefinedType([{typereference,L1,TypeName}|Rest]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=get(asn1_module),
+ type=TypeName}},Rest};
+parse_DefinedType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,'typereference.typereference',
+ 'typereference typereference']]}}).
+
+parse_SelectionType([{identifier,_,Name},{'<',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'SelectionType',Name,Type},Rest2};
+parse_SelectionType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'identifier <']}}).
+
+
+%% --------------------------
+
+
+%% This should probably be removed very soon
+% parse_ConstrainedType(Tokens) ->
+% case (catch parse_TypeWithConstraint(Tokens)) of
+% {'EXIT',Reason} ->
+% {Type,Rest} = parse_Type(Tokens),
+% {Constraint,Rest2} = parse_Constraint(Rest),
+% {Type#type{constraint=Constraint},Rest2};
+% {asn1_error,Reason2} ->
+% {Type,Rest} = parse_Type(Tokens),
+% {Constraint,Rest2} = parse_Constraint(Rest),
+% {Type#type{constraint=Constraint},Rest2};
+% Result ->
+% Result
+% end.
+
+parse_Constraints(Tokens) ->
+ parse_Constraints(Tokens,[]).
+
+parse_Constraints(Tokens,Acc) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ case Rest of
+ [{'(',_}|_Rest2] ->
+ parse_Constraints(Rest,[Constraint|Acc]);
+ _ ->
+ {lists:reverse([Constraint|Acc]),Rest}
+ end.
+
+parse_Constraint([{'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_ConstraintSpec(Rest),
+ {Exception,Rest3} = parse_ExceptionSpec(Rest2),
+ case Rest3 of
+ [{')',_}|Rest4] ->
+ {#constraint{c=Constraint,e=Exception},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,')']}})
+ end;
+parse_Constraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'(']}}).
+
+parse_ConstraintSpec(Tokens) ->
+ Flist = [fun parse_GeneralConstraint/1,
+ fun parse_SubtypeConstraint/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason2} ->
+ throw({asn1_error,Reason2});
+ Result ->
+ Result
+ end.
+
+parse_ExceptionSpec([LPar={')',_}|Rest]) ->
+ {undefined,[LPar|Rest]};
+parse_ExceptionSpec([{'!',_}|Rest]) ->
+ parse_ExceptionIdentification(Rest);
+parse_ExceptionSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,[')','!']]}}).
+
+parse_ExceptionIdentification(Tokens) ->
+ Flist = [fun parse_SignedNumber/1,
+ fun parse_DefinedValue/1,
+ fun parse_TypeColonValue/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason2} ->
+ throw({asn1_error,Reason2});
+ Result ->
+ Result
+ end.
+
+parse_TypeColonValue(Tokens) ->
+ {Type,Rest} = parse_Type(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Value,Rest3} = parse_Value(Rest2),
+ {{Type,Value},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+parse_SubtypeConstraint(Tokens) ->
+ parse_ElementSetSpecs(Tokens).
+
+parse_ElementSetSpecs([{'...',_}|Rest]) ->
+ {Elements,Rest2} = parse_ElementSetSpec(Rest),
+ {{[],Elements},Rest2};
+parse_ElementSetSpecs(Tokens) ->
+ {RootElems,Rest} = parse_ElementSetSpec(Tokens),
+ case Rest of
+ [{',',_},{'...',_},{',',_}|Rest2] ->
+ {AdditionalElems,Rest3} = parse_ElementSetSpec(Rest2),
+ {{RootElems,AdditionalElems},Rest3};
+ [{',',_},{'...',_}|Rest2] ->
+ {{RootElems,[]},Rest2};
+ _ ->
+ {RootElems,Rest}
+ end.
+
+parse_ElementSetSpec([{'ALL',_},{'EXCEPT',_}|Rest]) ->
+ {Exclusions,Rest2} = parse_Elements(Rest),
+ {{'ALL',{'EXCEPT',Exclusions}},Rest2};
+parse_ElementSetSpec(Tokens) ->
+ parse_Unions(Tokens).
+
+
+parse_Unions(Tokens) ->
+ {InterSec,Rest} = parse_Intersections(Tokens),
+ {Unions,Rest2} = parse_UnionsRec(Rest),
+ case {InterSec,Unions} of
+ {InterSec,[]} ->
+ {InterSec,Rest2};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest2};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ [union|V2],Rest2};
+ {V1,V2} ->
+ {[V1,union,V2],Rest2}
+% Other ->
+% throw(Other)
+ end.
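+%% Sketch of the merging above (element terms assumed): if both the leading
+%% intersection and the following union branch reduce to single values, e.g.
+%% {'SingleValue',2} and {'SingleValue',4}, they are combined with
+%% ordsets:union/2 into {'SingleValue',[2,4]}; otherwise the branches are
+%% collected into a list with the atom 'union' between them.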
+
+parse_UnionsRec([{'|',_}|Rest]) ->
+ {InterSec,Rest2} = parse_Intersections(Rest),
+ {URec,Rest3} = parse_UnionsRec(Rest2),
+ case {InterSec,URec} of
+ {V1,[]} ->
+ {V1,Rest3};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3}
+ end;
+parse_UnionsRec([{'UNION',_}|Rest]) ->
+ {InterSec,Rest2} = parse_Intersections(Rest),
+ {URec,Rest3} = parse_UnionsRec(Rest2),
+ case {InterSec,URec} of
+ {V1,[]} ->
+ {V1,Rest3};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3}
+ end;
+parse_UnionsRec(Tokens) ->
+ {[],Tokens}.
+
+parse_Intersections(Tokens) ->
+ {InterSec,Rest} = parse_IntersectionElements(Tokens),
+ {IRec,Rest2} = parse_IElemsRec(Rest),
+ case {InterSec,IRec} of
+ {V1,[]} ->
+ {V1,Rest2};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',
+ ordsets:intersection(to_set(V1),to_set(V2))},Rest2};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ [intersection|V2],Rest2};
+ {V1,V2} ->
+ {[V1,intersection,V2],Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'a Union']}})
+ end.
+
+parse_IElemsRec([{'^',_}|Rest]) ->
+ {InterSec,Rest2} = parse_IntersectionElements(Rest),
+ {IRec,Rest3} = parse_IElemsRec(Rest2),
+ case {InterSec,IRec} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',
+ ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
+ {V1,[]} ->
+ {V1,Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'an Intersection']}})
+ end;
+parse_IElemsRec([{'INTERSECTION',_}|Rest]) ->
+ {InterSec,Rest2} = parse_IntersectionElements(Rest),
+ {IRec,Rest3} = parse_IElemsRec(Rest2),
+ case {InterSec,IRec} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',
+ ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
+ {V1,[]} ->
+ {V1,Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'an Intersection']}})
+ end;
+parse_IElemsRec(Tokens) ->
+ {[],Tokens}.
+
+parse_IntersectionElements(Tokens) ->
+ {InterSec,Rest} = parse_Elements(Tokens),
+ case Rest of
+ [{'EXCEPT',_}|Rest2] ->
+ {Exclusion,Rest3} = parse_Elements(Rest2),
+ {{InterSec,{'EXCEPT',Exclusion}},Rest3};
+ Rest ->
+ {InterSec,Rest}
+ end.
+
+parse_Elements([{'(',_}|Rest]) ->
+ {Elems,Rest2} = parse_ElementSetSpec(Rest),
+ case Rest2 of
+ [{')',_}|Rest3] ->
+ {Elems,Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,')']}})
+ end;
+parse_Elements(Tokens) ->
+ Flist = [fun parse_SubtypeElements/1,
+ fun parse_ObjectSetElements/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ Err = {asn1_error,_} ->
+ throw(Err);
+ Result ->
+ Result
+ end.
+
+
+
+
+%% --------------------------
+
+parse_DefinedObjectClass([{typereference,_,_ModName},{'.',_},Tr={typereference,_,_ObjClName}|Rest]) ->
+%% {{objectclassname,ModName,ObjClName},Rest};
+% {{objectclassname,tref2Exttref(Tr)},Rest};
+ {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([Tr={typereference,_,_ObjClName}|Rest]) ->
+% {{objectclassname,tref2Exttref(Tr)},Rest};
+ {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([{'TYPE-IDENTIFIER',_}|Rest]) ->
+ {'TYPE-IDENTIFIER',Rest};
+parse_DefinedObjectClass([{'ABSTRACT-SYNTAX',_}|Rest]) ->
+ {'ABSTRACT-SYNTAX',Rest};
+parse_DefinedObjectClass(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['typereference . typereference',
+ typereference,
+ 'TYPE-IDENTIFIER',
+ 'ABSTRACT-SYNTAX']]}}).
+
+parse_ObjectClassAssignment([{typereference,L1,ObjClName},{'::=',_}|Rest]) ->
+ {Type,Rest2} = parse_ObjectClass(Rest),
+ {#classdef{pos=L1,name=ObjClName,typespec=Type},Rest2};
+parse_ObjectClassAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ 'typereference ::=']}}).
+
+parse_ObjectClass(Tokens) ->
+ Flist = [fun parse_DefinedObjectClass/1,
+ fun parse_ObjectClassDefn/1,
+ fun parse_ParameterizedObjectClass/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason2} ->
+ throw({asn1_error,Reason2});
+ Result ->
+ Result
+ end.
+
+parse_ObjectClassDefn([{'CLASS',_},{'{',_}|Rest]) ->
+ {Type,Rest2} = parse_FieldSpec(Rest),
+ {WithSyntaxSpec,Rest3} = parse_WithSyntaxSpec(Rest2),
+ {#objectclass{fields=Type,syntax=WithSyntaxSpec},Rest3};
+parse_ObjectClassDefn(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'CLASS {']}}).
+
+parse_FieldSpec(Tokens) ->
+ parse_FieldSpec(Tokens,[]).
+
+parse_FieldSpec(Tokens,Acc) ->
+ Flist = [fun parse_FixedTypeValueFieldSpec/1,
+ fun parse_VariableTypeValueFieldSpec/1,
+ fun parse_ObjectFieldSpec/1,
+ fun parse_FixedTypeValueSetFieldSpec/1,
+ fun parse_VariableTypeValueSetFieldSpec/1,
+ fun parse_TypeFieldSpec/1,
+ fun parse_ObjectSetFieldSpec/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {Type,[{'}',_}|Rest]} ->
+ {lists:reverse([Type|Acc]),Rest};
+ {Type,[{',',_}|Rest2]} ->
+ parse_FieldSpec(Rest2,[Type|Acc]);
+ {_,[H|_T]} ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end.
+
+parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
+ {{typefieldreference,FieldName},Rest};
+parse_PrimitiveFieldName([{valuefieldreference,_,FieldName}|Rest]) ->
+ {{valuefieldreference,FieldName},Rest};
+parse_PrimitiveFieldName(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typefieldreference,valuefieldreference]]}}).
+
+parse_FieldName(Tokens) ->
+ {Field,Rest} = parse_PrimitiveFieldName(Tokens),
+ parse_FieldName(Rest,[Field]).
+
+parse_FieldName([{'.',_}|Rest],Acc) ->
+ case (catch parse_PrimitiveFieldName(Rest)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {FieldName,Rest2} ->
+ parse_FieldName(Rest2,[FieldName|Acc])
+ end;
+parse_FieldName(Tokens,Acc) ->
+ {lists:reverse(Acc),Tokens}.
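+%% Illustrative sketch (token shapes assumed from the tokenizer): a field name
+%% such as "&Alphabet.&code" would arrive as
+%%   [{typefieldreference,L,'Alphabet'},{'.',L},{valuefieldreference,L,code}|Rest]
+%% and parse_FieldName/1 should then return
+%%   {[{typefieldreference,'Alphabet'},{valuefieldreference,code}],Rest}.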
+
+parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {Unique,Rest3} =
+ case Rest2 of
+ [{'UNIQUE',_}|Rest4] ->
+ {'UNIQUE',Rest4};
+ _ ->
+ {undefined,Rest2}
+ end,
+ {OptionalitySpec,Rest5} = parse_ValueOptionalitySpec(Rest3),
+ case Unique of
+ 'UNIQUE' ->
+ case OptionalitySpec of
+ {'DEFAULT',_} ->
+ throw({asn1_error,
+ {L1,get(asn1_module),
+ ['UNIQUE and DEFAULT in same field',VFieldName]}});
+ _ ->
+ {{fixedtypevaluefield,VFieldName,Type,Unique,OptionalitySpec},Rest5}
+ end;
+ _ ->
+ {{object_or_fixedtypevalue_field,VFieldName,Type,Unique,OptionalitySpec},Rest5}
+ end;
+parse_FixedTypeValueFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+
+parse_VariableTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
+ {FieldRef,Rest2} = parse_FieldName(Rest),
+ {OptionalitySpec,Rest3} = parse_ValueOptionalitySpec(Rest2),
+ {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},Rest3};
+parse_VariableTypeValueFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+
+parse_ObjectFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ {OptionalitySpec,Rest3} = parse_ObjectOptionalitySpec(Rest2),
+ {{objectfield,VFieldName,Class,OptionalitySpec},Rest3};
+parse_ObjectFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+
+parse_TypeFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {OptionalitySpec,Rest2} = parse_TypeOptionalitySpec(Rest),
+ {{typefield,TFieldName,OptionalitySpec},Rest2};
+parse_TypeFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_FixedTypeValueSetFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
+ {{objectset_or_fixedtypevalueset_field,TFieldName,Type,
+ OptionalitySpec},Rest3};
+parse_FixedTypeValueSetFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_VariableTypeValueSetFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {FieldRef,Rest2} = parse_FieldName(Rest),
+ {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
+ {{variabletypevaluesetfield,TFieldName,FieldRef,OptionalitySpec},Rest3};
+parse_VariableTypeValueSetFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_ObjectSetFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ {OptionalitySpec,Rest3} = parse_ObjectSetOptionalitySpec(Rest2),
+ {{objectsetfield,TFieldName,Class,OptionalitySpec},Rest3};
+parse_ObjectSetFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_ValueOptionalitySpec(Tokens)->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {Value,Rest2} = parse_Value(Rest),
+ {{'DEFAULT',Value},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_ObjectOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {Object,Rest2} = parse_Object(Rest),
+ {{'DEFAULT',Object},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_TypeOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'DEFAULT',Type},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_ValueSetOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {ValueSet,Rest2} = parse_ValueSet(Rest),
+ {{'DEFAULT',ValueSet},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_ObjectSetOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {ObjectSet,Rest2} = parse_ObjectSet(Rest),
+ {{'DEFAULT',ObjectSet},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_WithSyntaxSpec([{'WITH',_},{'SYNTAX',_}|Rest]) ->
+ {SyntaxList,Rest2} = parse_SyntaxList(Rest),
+ {{'WITH SYNTAX',SyntaxList},Rest2};
+parse_WithSyntaxSpec(Tokens) ->
+ {[],Tokens}.
+
+parse_SyntaxList([{'{',_},{'}',_}|Rest]) ->
+ {[],Rest};
+parse_SyntaxList([{'{',_}|Rest]) ->
+ parse_SyntaxList(Rest,[]);
+parse_SyntaxList(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+
+parse_SyntaxList(Tokens,Acc) ->
+ {SyntaxList,Rest} = parse_TokenOrGroupSpec(Tokens),
+ case Rest of
+ [{'}',_}|Rest2] ->
+ {lists:reverse([SyntaxList|Acc]),Rest2};
+ _ ->
+ parse_SyntaxList(Rest,[SyntaxList|Acc])
+ end.
+
+parse_TokenOrGroupSpec(Tokens) ->
+ Flist = [fun parse_RequiredToken/1,
+ fun parse_OptionalGroup/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_RequiredToken([{typereference,L1,WordName}|Rest]) ->
+ case is_word(WordName) of
+ false ->
+ throw({asn1_error,{L1,get(asn1_module),
+ [got,WordName,expected,a,'Word']}});
+ true ->
+ {WordName,Rest}
+ end;
+parse_RequiredToken([{',',L1}|Rest]) ->
+ {{',',L1},Rest};
+parse_RequiredToken([{WordName,L1}|Rest]) ->
+ case is_word(WordName) of
+ false ->
+ throw({asn1_error,{L1,get(asn1_module),
+ [got,WordName,expected,a,'Word']}});
+ true ->
+ {WordName,Rest}
+ end;
+parse_RequiredToken(Tokens) ->
+ parse_PrimitiveFieldName(Tokens).
+
+parse_OptionalGroup([{'[',_}|Rest]) ->
+ {Spec,Rest2} = parse_TokenOrGroupSpec(Rest),
+ {SpecList,Rest3} = parse_OptionalGroup(Rest2,[Spec]),
+ {SpecList,Rest3}.
+
+parse_OptionalGroup([{']',_}|Rest],Acc) ->
+ {lists:reverse(Acc),Rest};
+parse_OptionalGroup(Tokens,Acc) ->
+ {Spec,Rest} = parse_TokenOrGroupSpec(Tokens),
+ parse_OptionalGroup(Rest,[Spec|Acc]).
+
+parse_DefinedObject([Id={identifier,_,_ObjName}|Rest]) ->
+ {{object,identifier2Extvalueref(Id)},Rest};
+parse_DefinedObject([{typereference,L1,ModName},{'.',_},{identifier,_,ObjName}|Rest]) ->
+ {{object, #'Externaltypereference'{pos=L1,module=ModName,type=ObjName}},Rest};
+parse_DefinedObject(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [identifier,'typereference.identifier']]}}).
+
+parse_ObjectAssignment([{identifier,L1,ObjName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Object,Rest4} = parse_Object(Rest3),
+ {#typedef{pos=L1,name=ObjName,
+ typespec=#'Object'{classname=Class,def=Object}},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}});
+ Other ->
+ throw({asn1_error,{L1,get(asn1_module),
+ [got,Other,expected,'::=']}})
+ end;
+parse_ObjectAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_Object(Tokens) ->
+ Flist=[fun parse_ObjectDefn/1,
+ fun parse_ObjectFromObject/1,
+ fun parse_ParameterizedObject/1,
+ fun parse_DefinedObject/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ObjectDefn(Tokens) ->
+ Flist=[fun parse_DefaultSyntax/1,
+ fun parse_DefinedSyntax/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_DefaultSyntax([{'{',_},{'}',_}|Rest]) ->
+ {{object,defaultsyntax,[]},Rest};
+parse_DefaultSyntax([{'{',_}|Rest]) ->
+ parse_DefaultSyntax(Rest,[]);
+parse_DefaultSyntax(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+
+parse_DefaultSyntax(Tokens,Acc) ->
+ {Setting,Rest} = parse_FieldSetting(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_DefaultSyntax(Rest2,[Setting|Acc]);
+ [{'}',_}|Rest3] ->
+ {{object,defaultsyntax,lists:reverse([Setting|Acc])},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,[',','}']]}})
+ end.
+
+parse_FieldSetting(Tokens) ->
+ {{_,PrimFieldName},Rest} = parse_PrimitiveFieldName(Tokens),
+ {Setting,Rest2} = parse_Setting(Rest),
+ {{PrimFieldName,Setting},Rest2}.
+
+parse_DefinedSyntax([{'{',_}|Rest]) ->
+ parse_DefinedSyntax(Rest,[]).
+
+parse_DefinedSyntax(Tokens,Acc) ->
+ case Tokens of
+ [{'}',_}|Rest2] ->
+ {{object,definedsyntax,lists:reverse(Acc)},Rest2};
+ _ ->
+ {DefSynTok,Rest3} = parse_DefinedSyntaxToken(Tokens),
+ parse_DefinedSyntax(Rest3,[DefSynTok|Acc])
+ end.
+
+parse_DefinedSyntaxToken([{',',L1}|Rest]) ->
+ {{',',L1},Rest};
+parse_DefinedSyntaxToken([{typereference,L1,Name}|Rest]) ->
+ case is_word(Name) of
+ false ->
+ {{setting,L1,Name},Rest};
+ true ->
+ {{word_or_setting,L1,Name},Rest}
+ end;
+parse_DefinedSyntaxToken(Tokens) ->
+ case catch parse_Setting(Tokens) of
+ {asn1_error,_} ->
+ parse_Word(Tokens);
+ {'EXIT',Reason} ->
+ exit(Reason);
+ Result ->
+ Result
+ end.
+
+parse_Word([{Name,Pos}|Rest]) ->
+ case is_word(Name) of
+ false ->
+ throw({asn1_error,{Pos,get(asn1_module),
+ [got,Name, expected,a,'Word']}});
+ true ->
+ {{word_or_setting,Pos,Name},Rest}
+ end.
+
+parse_Setting(Tokens) ->
+ Flist = [fun parse_Type/1,
+ fun parse_Value/1,
+ fun parse_Object/1,
+ fun parse_ObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_DefinedObjectSet([{typereference,L1,ModuleName},{'.',_},
+ {typereference,L2,ObjSetName}|Rest]) ->
+ {{objectset,L1,#'Externaltypereference'{pos=L2,module=ModuleName,
+ type=ObjSetName}},Rest};
+parse_DefinedObjectSet([{typereference,L1,ObjSetName}|Rest]) ->
+ {{objectset,L1,#'Externaltypereference'{pos=L1,module=get(asn1_module),
+ type=ObjSetName}},Rest};
+parse_DefinedObjectSet(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,'typereference.typereference']]}}).
+
+parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {ObjectSet,Rest4} = parse_ObjectSet(Rest3),
+ {#typedef{pos=L1,name=ObjSetName,
+ typespec=#'ObjectSet'{class=Class,
+ set=ObjectSet}},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ObjectSetAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ObjectSet([{'{',_}|Rest]) ->
+ {ObjSetSpec,Rest2} = parse_ObjectSetSpec(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {ObjSetSpec,Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+parse_ObjectSet(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ObjectSetSpec([{'...',_}|Rest]) ->
+ {['EXTENSIONMARK'],Rest};
+parse_ObjectSetSpec(Tokens) ->
+ parse_ElementSetSpecs(Tokens).
+
+parse_ObjectSetElements(Tokens) ->
+ Flist = [fun parse_Object/1,
+ fun parse_DefinedObjectSet/1,
+ fun parse_ObjectSetFromObjects/1,
+ fun parse_ParameterizedObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ObjectClassFieldType(Tokens) ->
+ {Class,Rest} = parse_DefinedObjectClass(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {FieldName,Rest3} = parse_FieldName(Rest2),
+ OCFT = #'ObjectClassFieldType'{
+ classname=Class,
+ class=Class,fieldname=FieldName},
+ {#type{def=OCFT},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw(Other)
+ end.
+
+%parse_ObjectClassFieldValue(Tokens) ->
+% Flist = [fun parse_OpenTypeFieldVal/1,
+% fun parse_FixedTypeFieldVal/1],
+% case (catch parse_or(Tokens,Flist)) of
+% {'EXIT',Reason} ->
+% throw(Reason);
+% AsnErr = {asn1_error,_} ->
+% throw(AsnErr);
+% Result ->
+% Result
+% end.
+
+parse_ObjectClassFieldValue(Tokens) ->
+ parse_OpenTypeFieldVal(Tokens).
+
+parse_OpenTypeFieldVal(Tokens) ->
+ {Type,Rest} = parse_Type(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Value,Rest3} = parse_Value(Rest2),
+ {{opentypefieldvalue,Type,Value},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+% parse_FixedTypeFieldVal(Tokens) ->
+% parse_Value(Tokens).
+
+% parse_InformationFromObjects(Tokens) ->
+% Flist = [fun parse_ValueFromObject/1,
+% fun parse_ValueSetFromObjects/1,
+% fun parse_TypeFromObject/1,
+% fun parse_ObjectFromObject/1],
+% case (catch parse_or(Tokens,Flist)) of
+% {'EXIT',Reason} ->
+% throw(Reason);
+% AsnErr = {asn1_error,_} ->
+% throw(AsnErr);
+% Result ->
+% Result
+% end.
+
+parse_ReferencedObjects(Tokens) ->
+ Flist = [fun parse_DefinedObject/1,
+ fun parse_DefinedObjectSet/1,
+ fun parse_ParameterizedObject/1,
+ fun parse_ParameterizedObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ValueFromObject(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ case lists:last(Name) of
+ {valuefieldreference,_} ->
+ {{'ValueFromObject',Objects,Name},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,typefieldreference,expected,
+ valuefieldreference]}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_ValueSetFromObjects(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ case lists:last(Name) of
+ {typefieldreference,_FieldName} ->
+ {{'ValueSetFromObjects',Objects,Name},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,
+ typefieldreference]}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_TypeFromObject(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ case lists:last(Name) of
+ {typefieldreference,_FieldName} ->
+ {{'TypeFromObject',Objects,Name},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,
+ typefieldreference]}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_ObjectFromObject(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ {{'ObjectFromObject',Objects,Name},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_ObjectSetFromObjects(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ {{'ObjectSetFromObjects',Objects,Name},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+% parse_InstanceOfType([{'INSTANCE',_},{'OF',_}|Rest]) ->
+% {Class,Rest2} = parse_DefinedObjectClass(Rest),
+% {{'InstanceOfType',Class},Rest2}.
+
+% parse_InstanceOfValue(Tokens) ->
+% parse_Value(Tokens).
+
+
+
+%% X.682 constraint specification
+
+parse_GeneralConstraint(Tokens) ->
+ Flist = [fun parse_UserDefinedConstraint/1,
+ fun parse_TableConstraint/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_UserDefinedConstraint([{'CONSTRAINED',_},{'BY',_},{'{',_},{'}',_}|Rest])->
+ {{constrained_by,[]},Rest};
+parse_UserDefinedConstraint([{'CONSTRAINED',_},
+ {'BY',_},
+ {'{',_}|Rest]) ->
+ {Param,Rest2} = parse_UserDefinedConstraintParameter(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {{constrained_by,Param},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+parse_UserDefinedConstraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['CONSTRAINED BY {}','CONSTRAINED BY {']]}}).
+
+parse_UserDefinedConstraintParameter(Tokens) ->
+ parse_UserDefinedConstraintParameter(Tokens,[]).
+parse_UserDefinedConstraintParameter(Tokens,Acc) ->
+ Flist = [fun parse_GovernorAndActualParameter/1,
+ fun parse_ActualParameter/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {Result,Rest} ->
+ case Rest of
+ [{',',_}|_Rest2] ->
+ parse_UserDefinedConstraintParameter(Tokens,[Result|Acc]);
+ _ ->
+ {lists:reverse([Result|Acc]),Rest}
+ end
+ end.
+
+parse_GovernorAndActualParameter(Tokens) ->
+ {Governor,Rest} = parse_Governor(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Params,Rest3} = parse_ActualParameter(Rest2),
+ {{'Governor_Params',Governor,Params},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+parse_TableConstraint(Tokens) ->
+ Flist = [fun parse_ComponentRelationConstraint/1,
+ fun parse_SimpleTableConstraint/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_SimpleTableConstraint(Tokens) ->
+ {ObjectSet,Rest} = parse_ObjectSet(Tokens),
+ {{simpletable,ObjectSet},Rest}.
+
+parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
+ {ObjectSet,Rest2} = parse_DefinedObjectSet(Rest),
+ case Rest2 of
+ [{'}',_},{'{',_}|Rest3] ->
+ {AtNot,Rest4} = parse_AtNotationList(Rest3,[]),
+ case Rest4 of
+ [{'}',_}|Rest5] ->
+ {{componentrelation,ObjectSet,AtNot},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,
+ 'ComponentRelationConstraint',ended,with,'}']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ComponentRelationConstraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_AtNotationList(Tokens,Acc) ->
+ {AtNot,Rest} = parse_AtNotation(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_AtNotationList(Rest2,[AtNot|Acc]);
+ _ ->
+ {lists:reverse([AtNot|Acc]),Rest}
+ end.
+
+parse_AtNotation([{'@',_},{'.',_}|Rest]) ->
+ {CIdList,Rest2} = parse_ComponentIdList(Rest),
+ {{innermost,CIdList},Rest2};
+parse_AtNotation([{'@',_}|Rest]) ->
+ {CIdList,Rest2} = parse_ComponentIdList(Rest),
+ {{outermost,CIdList},Rest2};
+parse_AtNotation(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,['@','@.']]}}).
+
+parse_ComponentIdList(Tokens) ->
+ parse_ComponentIdList(Tokens,[]).
+
+parse_ComponentIdList([Id = {identifier,_,_},{'.',_}|Rest],Acc) ->
+ parse_ComponentIdList(Rest,[identifier2Extvalueref(Id)|Acc]);
+parse_ComponentIdList([Id = {identifier,_,_}|Rest],Acc) ->
+ {lists:reverse([identifier2Extvalueref(Id)|Acc]),Rest};
+parse_ComponentIdList(Tokens,_) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [identifier,'identifier.']]}}).
+
+
+
+
+
+% X.683 Parameterization of ASN.1 specifications
+
+parse_Governor(Tokens) ->
+ Flist = [fun parse_Type/1,
+ fun parse_DefinedObjectClass/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ActualParameter(Tokens) ->
+ Flist = [fun parse_Type/1,
+ fun parse_Value/1,
+ fun parse_ValueSet/1,
+ fun parse_DefinedObjectClass/1,
+ fun parse_Object/1,
+ fun parse_ObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ParameterizedAssignment(Tokens) ->
+ Flist = [fun parse_ParameterizedTypeAssignment/1,
+ fun parse_ParameterizedValueAssignment/1,
+ fun parse_ParameterizedValueSetTypeAssignment/1,
+ fun parse_ParameterizedObjectClassAssignment/1,
+ fun parse_ParameterizedObjectAssignment/1,
+ fun parse_ParameterizedObjectSetAssignment/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ AsnAssErr = {asn1_assignment_error,_} ->
+ throw(AsnAssErr);
+ Result ->
+ Result
+ end.
+
+parse_ParameterizedTypeAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Type},
+ Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedTypeAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Type,Rest3} = parse_Type(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {Value,Rest5} = parse_Value(Rest4),
+ {#pvaluedef{pos=L1,name=Name,args=ParameterList,type=Type,
+ value=Value},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedValueAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_ParameterizedValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Type,Rest3} = parse_Type(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {ValueSet,Rest5} = parse_ValueSet(Rest4),
+ {#pvaluesetdef{pos=L1,name=Name,args=ParameterList,
+ type=Type,valueset=ValueSet},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedValueSetTypeAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterizedObjectClassAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Class,Rest4} = parse_ObjectClass(Rest3),
+ {#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Class},
+ Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedObjectClassAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Class,Rest3} = parse_DefinedObjectClass(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {Object,Rest5} = parse_Object(Rest4),
+ {#pobjectdef{pos=L1,name=Name,args=ParameterList,
+ class=Class,def=Object},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ParameterizedObjectAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_ParameterizedObjectSetAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Class,Rest3} = parse_DefinedObjectClass(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {ObjectSet,Rest5} = parse_ObjectSet(Rest4),
+ {#pobjectsetdef{pos=L1,name=Name,args=ParameterList,
+ class=Class,def=ObjectSet},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ParameterizedObjectSetAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterList([{'{',_}|Rest]) ->
+ parse_ParameterList(Rest,[]);
+parse_ParameterList(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ParameterList(Tokens,Acc) ->
+ {Parameter,Rest} = parse_Parameter(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_ParameterList(Rest2,[Parameter|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([Parameter|Acc]),Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,[',','}']]}})
+ end.
+
+parse_Parameter(Tokens) ->
+ Flist = [fun parse_ParamGovAndRef/1,
+ fun parse_Reference/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ParamGovAndRef(Tokens) ->
+ {ParamGov,Rest} = parse_ParamGovernor(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Ref,Rest3} = parse_Reference(Rest2),
+ {{ParamGov,Ref},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+parse_ParamGovernor(Tokens) ->
+ Flist = [fun parse_Governor/1,
+ fun parse_Reference/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+% parse_ParameterizedReference(Tokens) ->
+% {Ref,Rest} = parse_Reference(Tokens),
+% case Rest of
+% [{'{',_},{'}',_}|Rest2] ->
+% {{ptref,Ref},Rest2};
+% _ ->
+% {{ptref,Ref},Rest}
+% end.
+
+parse_SimpleDefinedType([{typereference,L1,ModuleName},{'.',_},
+ {typereference,_,TypeName}|Rest]) ->
+ {#'Externaltypereference'{pos=L1,module=ModuleName,
+ type=TypeName},Rest};
+parse_SimpleDefinedType([Tref={typereference,_,_}|Rest]) ->
+% {#'Externaltypereference'{pos=L2,module=get(asn1_module),
+% type=TypeName},Rest};
+ {tref2Exttref(Tref),Rest};
+parse_SimpleDefinedType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,'typereference.typereference']]}}).
+
+parse_SimpleDefinedValue([{typereference,L1,ModuleName},{'.',_},
+ {identifier,_,Value}|Rest]) ->
+ {{simpledefinedvalue,#'Externalvaluereference'{pos=L1,module=ModuleName,
+ value=Value}},Rest};
+parse_SimpleDefinedValue([{identifier,L2,Value}|Rest]) ->
+ {{simpledefinedvalue,L2,Value},Rest};
+parse_SimpleDefinedValue(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['typereference.identifier',identifier]]}}).
+
+parse_ParameterizedType(Tokens) ->
+ {Type,Rest} = parse_SimpleDefinedType(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{pt,Type,Params},Rest2}.
+
+parse_ParameterizedValue(Tokens) ->
+ {Value,Rest} = parse_SimpleDefinedValue(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{pv,Value,Params},Rest2}.
+
+parse_ParameterizedObjectClass(Tokens) ->
+ {Type,Rest} = parse_DefinedObjectClass(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{poc,Type,Params},Rest2}.
+
+parse_ParameterizedObjectSet(Tokens) ->
+ {ObjectSet,Rest} = parse_DefinedObjectSet(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{pos,ObjectSet,Params},Rest2}.
+
+parse_ParameterizedObject(Tokens) ->
+ {Object,Rest} = parse_DefinedObject(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{po,Object,Params},Rest2}.
+
+parse_ActualParameterList([{'{',_}|Rest]) ->
+ parse_ActualParameterList(Rest,[]);
+parse_ActualParameterList(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ActualParameterList(Tokens,Acc) ->
+ {Parameter,Rest} = parse_ActualParameter(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_ActualParameterList(Rest2,[Parameter|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([Parameter|Acc]),Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,[',','}']]}})
+%%% Other ->
+%%% throw(Other)
+ end.
+
+
+
+
+
+
+
+%-------------------------
+
+is_word(Token) ->
+ case not_allowed_word(Token) of
+ true -> false;
+ _ ->
+ if
+ atom(Token) ->
+ Item = atom_to_list(Token),
+ is_word(Item);
+ list(Token), length(Token) == 1 ->
+ check_one_char_word(Token);
+ list(Token) ->
+ [A|Rest] = Token,
+ case check_first(A) of
+ true ->
+ check_rest(Rest);
+ _ ->
+ false
+ end
+ end
+ end.
+
+not_allowed_word(Name) ->
+ lists:member(Name,["BIT",
+ "BOOLEAN",
+ "CHARACTER",
+ "CHOICE",
+ "EMBEDDED",
+ "END",
+ "ENUMERATED",
+ "EXTERNAL",
+ "FALSE",
+ "INSTANCE",
+ "INTEGER",
+ "INTERSECTION",
+ "MINUS-INFINITY",
+ "NULL",
+ "OBJECT",
+ "OCTET",
+ "PLUS-INFINITY",
+ "REAL",
+ "SEQUENCE",
+ "SET",
+ "TRUE",
+ "UNION"]).
+
+check_one_char_word([A]) when $A =< A, $Z >= A ->
+ true;
+check_one_char_word([_]) ->
+ false. %% unknown item in SyntaxList
+
+check_first(A) when $A =< A, $Z >= A ->
+ true;
+check_first(_) ->
+ false. %% unknown item in SyntaxList
+
+check_rest([R,R|_Rs]) when $- == R ->
+ false; %% two consecutive hyphens are not allowed in a word
+check_rest([R]) when $- == R ->
+ false; %% word cannot end with hyphen
+check_rest([R|Rs]) when $A=<R, $Z>=R; $-==R ->
+ check_rest(Rs);
+check_rest([]) ->
+ true;
+check_rest(_) ->
+ false.
+
+
+to_set(V) when list(V) ->
+ ordsets:list_to_set(V);
+to_set(V) ->
+ ordsets:list_to_set([V]).
+
+
+parse_AlternativeTypeLists(Tokens) ->
+ {AlternativeTypeList,Rest1} = parse_AlternativeTypeList(Tokens),
+ {ExtensionAndException,Rest2} =
+ case Rest1 of
+ [{',',_},{'...',L1},{'!',_}|Rest12] ->
+ {_,Rest13} = parse_ExceptionIdentification(Rest12),
+ %% Exception info is currently thrown away
+ {[#'EXTENSIONMARK'{pos=L1}],Rest13};
+ [{',',_},{'...',L1}|Rest12] ->
+ {[#'EXTENSIONMARK'{pos=L1}],Rest12};
+ _ ->
+ {[],Rest1}
+ end,
+ case ExtensionAndException of
+ [] ->
+ {AlternativeTypeList,Rest2};
+ _ ->
+ {ExtensionAddition,Rest3} =
+ case Rest2 of
+ [{',',_}|Rest23] ->
+ parse_ExtensionAdditionAlternativeList(Rest23);
+ _ ->
+ {[],Rest2}
+ end,
+ {OptionalExtensionMarker,Rest4} =
+ case Rest3 of
+ [{',',_},{'...',L3}|Rest31] ->
+ {[#'EXTENSIONMARK'{pos=L3}],Rest31};
+ _ ->
+ {[],Rest3}
+ end,
+ {AlternativeTypeList ++ ExtensionAndException ++ ExtensionAddition ++ OptionalExtensionMarker, Rest4}
+ end.
+
+
+parse_AlternativeTypeList(Tokens) ->
+ parse_AlternativeTypeList(Tokens,[]).
+
+parse_AlternativeTypeList(Tokens,Acc) ->
+ {NamedType,Rest} = parse_NamedType(Tokens),
+ case Rest of
+ [{',',_},Id = {identifier,_,_}|Rest2] ->
+ parse_AlternativeTypeList([Id|Rest2],[NamedType|Acc]);
+ _ ->
+ {lists:reverse([NamedType|Acc]),Rest}
+ end.
+
+
+
+parse_ExtensionAdditionAlternativeList(Tokens) ->
+ parse_ExtensionAdditionAlternativeList(Tokens,[]).
+
+parse_ExtensionAdditionAlternativeList(Tokens,Acc) ->
+ {Element,Rest0} =
+ case Tokens of
+ [{identifier,_,_}|_Rest] ->
+ parse_NamedType(Tokens);
+ [{'[[',_}|_] ->
+ parse_ExtensionAdditionAlternatives(Tokens)
+ end,
+ case Rest0 of
+ [{',',_}|Rest01] ->
+ parse_ExtensionAdditionAlternativeList(Rest01,[Element|Acc]);
+ _ ->
+ {lists:reverse([Element|Acc]),Rest0}
+ end.
+
+parse_ExtensionAdditionAlternatives([{'[[',_}|Rest]) ->
+ parse_ExtensionAdditionAlternatives(Rest,[]);
+parse_ExtensionAdditionAlternatives(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'[[']}}).
+
+parse_ExtensionAdditionAlternatives([Id = {identifier,_,_}|Rest],Acc) ->
+ {NamedType, Rest2} = parse_NamedType([Id|Rest]),
+ case Rest2 of
+ [{',',_}|Rest21] ->
+ parse_ExtensionAdditionAlternatives(Rest21,[NamedType|Acc]);
+ [{']]',_}|Rest21] ->
+ {lists:reverse(Acc),Rest21};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,[',',']]']]}})
+ end.
+
+parse_NamedType([{identifier,L1,Idname}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#'ComponentType'{pos=L1,name=Idname,typespec=Type,prop=mandatory},Rest2};
+parse_NamedType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+
+parse_ComponentTypeLists(Tokens) ->
+% Resulting tuple {ComponentTypeList,Rest1} is returned
+ case Tokens of
+ [{identifier,_,_}|_Rest0] ->
+ {Clist,Rest01} = parse_ComponentTypeList(Tokens),
+ case Rest01 of
+ [{',',_}|Rest02] ->
+ parse_ComponentTypeLists(Rest02,Clist);
+ _ ->
+ {Clist,Rest01}
+ end;
+ [{'COMPONENTS',_},{'OF',_}|_Rest] ->
+ {Clist,Rest01} = parse_ComponentTypeList(Tokens),
+ case Rest01 of
+ [{',',_}|Rest02] ->
+ parse_ComponentTypeLists(Rest02,Clist);
+ _ ->
+ {Clist,Rest01}
+ end;
+ _ ->
+ parse_ComponentTypeLists(Tokens,[])
+ end.
+
+parse_ComponentTypeLists([{'...',L1},{'!',_}|Rest],Clist1) ->
+ {_,Rest2} = parse_ExceptionIdentification(Rest),
+ %% Exception info is currently thrown away
+ parse_ComponentTypeLists2(Rest2,Clist1++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists([{'...',L1}|Rest],Clist1) ->
+ parse_ComponentTypeLists2(Rest,Clist1++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists(Tokens,Clist1) ->
+ {Clist1,Tokens}.
+
+
+parse_ComponentTypeLists2(Tokens,Clist1) ->
+ {ExtensionAddition,Rest2} =
+ case Tokens of
+ [{',',_}|Rest1] ->
+ parse_ExtensionAdditionList(Rest1);
+ _ ->
+ {[],Tokens}
+ end,
+ {OptionalExtensionMarker,Rest3} =
+ case Rest2 of
+ [{',',_},{'...',L2}|Rest21] ->
+ {[#'EXTENSIONMARK'{pos=L2}],Rest21};
+ _ ->
+ {[],Rest2}
+ end,
+ {RootComponentTypeList,Rest4} =
+ case Rest3 of
+ [{',',_}|Rest31] ->
+ parse_ComponentTypeList(Rest31);
+ _ ->
+ {[],Rest3}
+ end,
+ {Clist1 ++ ExtensionAddition ++ OptionalExtensionMarker ++ RootComponentTypeList, Rest4}.
+
+
+parse_ComponentTypeList(Tokens) ->
+ parse_ComponentTypeList(Tokens,[]).
+
+parse_ComponentTypeList(Tokens,Acc) ->
+ {ComponentType,Rest} = parse_ComponentType(Tokens),
+ case Rest of
+ [{',',_},Id = {identifier,_,_}|Rest2] ->
+ parse_ComponentTypeList([Id|Rest2],[ComponentType|Acc]);
+ [{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest2] ->
+ parse_ComponentTypeList([C1,C2|Rest2],[ComponentType|Acc]);
+% _ ->
+% {lists:reverse([ComponentType|Acc]),Rest}
+ [{'}',_}|_] ->
+ {lists:reverse([ComponentType|Acc]),Rest};
+ [{',',_},{'...',_}|_] ->
+ {lists:reverse([ComponentType|Acc]),Rest};
+ _ ->
+ throw({asn1_error,
+ {get_line(hd(Tokens)),get(asn1_module),
+ [got,[get_token(hd(Rest)),get_token(hd(tl(Rest)))],
+ expected,['}',', identifier']]}})
+ end.
+
+
+parse_ExtensionAdditionList(Tokens) ->
+ parse_ExtensionAdditionList(Tokens,[]).
+
+parse_ExtensionAdditionList(Tokens,Acc) ->
+ {Element,Rest0} =
+ case Tokens of
+ [{identifier,_,_}|_Rest] ->
+ parse_ComponentType(Tokens);
+ [{'[[',_}|_] ->
+ parse_ExtensionAdditions(Tokens);
+ _ ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [identifier,'[[']]}})
+ end,
+ case Rest0 of
+ [{',',_}|Rest01] ->
+ parse_ExtensionAdditionList(Rest01,[Element|Acc]);
+ _ ->
+ {lists:reverse([Element|Acc]),Rest0}
+ end.
+
+parse_ExtensionAdditions([{'[[',_}|Rest]) ->
+ parse_ExtensionAdditions(Rest,[]);
+parse_ExtensionAdditions(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'[[']}}).
+
+parse_ExtensionAdditions([Id = {identifier,_,_}|Rest],Acc) ->
+ {ComponentType, Rest2} = parse_ComponentType([Id|Rest]),
+ case Rest2 of
+ [{',',_}|Rest21] ->
+ parse_ExtensionAdditions(Rest21,[ComponentType|Acc]);
+ [{']]',_}|Rest21] ->
+ {lists:reverse(Acc),Rest21};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,[',',']]']]}})
+ end;
+parse_ExtensionAdditions(Tokens,_) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_ComponentType([{'COMPONENTS',_},{'OF',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'COMPONENTS OF',Type},Rest2};
+parse_ComponentType(Tokens) ->
+ {NamedType,Rest} = parse_NamedType(Tokens),
+ case Rest of
+ [{'OPTIONAL',_}|Rest2] ->
+ {NamedType#'ComponentType'{prop='OPTIONAL'},Rest2};
+ [{'DEFAULT',_}|Rest2] ->
+ {Value,Rest21} = parse_Value(Rest2),
+ {NamedType#'ComponentType'{prop={'DEFAULT',Value}},Rest21};
+ _ ->
+ {NamedType,Rest}
+ end.
+
+
+
+parse_SignedNumber([{number,_,Value}|Rest]) ->
+ {Value,Rest};
+parse_SignedNumber([{'-',_},{number,_,Value}|Rest]) ->
+ {-Value,Rest};
+parse_SignedNumber(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [number,'-number']]}}).
+
+parse_Enumerations(Tokens=[{identifier,_,_}|_Rest]) ->
+ parse_Enumerations(Tokens,[]);
+parse_Enumerations([H|_T]) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,identifier]}}).
+
+parse_Enumerations(Tokens = [{identifier,_,_},{'(',_}|_Rest], Acc) ->
+ {NamedNumber,Rest2} = parse_NamedNumber(Tokens),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_Enumerations(Rest3,[NamedNumber|Acc]);
+ _ ->
+ {lists:reverse([NamedNumber|Acc]),Rest2}
+ end;
+parse_Enumerations([{identifier,_,Id}|Rest], Acc) ->
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_Enumerations(Rest2,[Id|Acc]);
+ _ ->
+ {lists:reverse([Id|Acc]),Rest}
+ end;
+parse_Enumerations([{'...',_}|Rest], Acc) ->
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_Enumerations(Rest2,['EXTENSIONMARK'|Acc]);
+ _ ->
+ {lists:reverse(['EXTENSIONMARK'|Acc]),Rest}
+ end;
+parse_Enumerations([H|_T],_) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,identifier]}}).
+
+parse_NamedNumberList(Tokens) ->
+ parse_NamedNumberList(Tokens,[]).
+
+parse_NamedNumberList(Tokens,Acc) ->
+ {NamedNum,Rest} = parse_NamedNumber(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_NamedNumberList(Rest2,[NamedNum|Acc]);
+ _ ->
+ {lists:reverse([NamedNum|Acc]),Rest}
+ end.
+
+parse_NamedNumber([{identifier,_,Name},{'(',_}|Rest]) ->
+ Flist = [fun parse_SignedNumber/1,
+ fun parse_DefinedValue/1],
+ case (catch parse_or(Rest,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {NamedNum,[{')',_}|Rest2]} ->
+ {{'NamedNumber',Name,NamedNum},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'NamedNumberList']}})
+ end;
+parse_NamedNumber(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+
+parse_Tag([{'[',_}|Rest]) ->
+ {Class,Rest2} = parse_Class(Rest),
+ {ClassNumber,Rest3} =
+ case Rest2 of
+ [{number,_,Num}|Rest21] ->
+ {Num,Rest21};
+ _ ->
+ parse_DefinedValue(Rest2)
+ end,
+ case Rest3 of
+ [{']',_}|Rest4] ->
+ {#tag{class=Class,number=ClassNumber},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,']']}})
+ end;
+parse_Tag(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'[']}}).
+
+parse_Class([{'UNIVERSAL',_}|Rest]) ->
+ {'UNIVERSAL',Rest};
+parse_Class([{'APPLICATION',_}|Rest]) ->
+ {'APPLICATION',Rest};
+parse_Class([{'PRIVATE',_}|Rest]) ->
+ {'PRIVATE',Rest};
+parse_Class(Tokens) ->
+ {'CONTEXT',Tokens}.
+
+parse_Value(Tokens) ->
+ Flist = [fun parse_BuiltinValue/1,
+ fun parse_ValueFromObject/1,
+ fun parse_DefinedValue/1],
+
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_BuiltinValue([{bstring,_,Bstr}|Rest]) ->
+ {{bstring,Bstr},Rest};
+parse_BuiltinValue([{hstring,_,Hstr}|Rest]) ->
+ {{hstring,Hstr},Rest};
+parse_BuiltinValue([{'{',_},{'}',_}|Rest]) ->
+ {[],Rest};
+parse_BuiltinValue(Tokens = [{'{',_}|_Rest]) ->
+ Flist = [
+ fun parse_SequenceOfValue/1,
+ fun parse_SequenceValue/1,
+ fun parse_ObjectIdentifierValue/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end;
+parse_BuiltinValue([{identifier,_,IdName},{':',_}|Rest]) ->
+ {Value,Rest2} = parse_Value(Rest),
+ {{'CHOICE',{IdName,Value}},Rest2};
+parse_BuiltinValue([{'NULL',_}|Rest]) ->
+ {'NULL',Rest};
+parse_BuiltinValue([{'TRUE',_}|Rest]) ->
+ {true,Rest};
+parse_BuiltinValue([{'FALSE',_}|Rest]) ->
+ {false,Rest};
+parse_BuiltinValue([{'PLUS-INFINITY',_}|Rest]) ->
+ {'PLUS-INFINITY',Rest};
+parse_BuiltinValue([{'MINUS-INFINITY',_}|Rest]) ->
+ {'MINUS-INFINITY',Rest};
+parse_BuiltinValue([{cstring,_,Cstr}|Rest]) ->
+ {Cstr,Rest};
+parse_BuiltinValue([{number,_,Num}|Rest]) ->
+ {Num,Rest};
+parse_BuiltinValue([{'-',_},{number,_,Num}|Rest]) ->
+ {- Num,Rest};
+parse_BuiltinValue(Tokens) ->
+ parse_ObjectClassFieldValue(Tokens).
+
+%% Externalvaluereference
+parse_DefinedValue([{typereference,L1,Tname},{'.',_},{identifier,_,Idname}|Rest]) ->
+ {#'Externalvaluereference'{pos=L1,module=Tname,value=Idname},Rest};
+%% valuereference
+parse_DefinedValue([Id = {identifier,_,_}|Rest]) ->
+ {identifier2Extvalueref(Id),Rest};
+%% ParameterizedValue
+parse_DefinedValue(Tokens) ->
+ parse_ParameterizedValue(Tokens).
+
+
+parse_SequenceValue([{'{',_}|Tokens]) ->
+ parse_SequenceValue(Tokens,[]);
+parse_SequenceValue(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_SequenceValue([{identifier,_,IdName}|Rest],Acc) ->
+ {Value,Rest2} = parse_Value(Rest),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_SequenceValue(Rest3,[{IdName,Value}|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([{IdName,Value}|Acc]),Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_SequenceValue(Tokens,_Acc) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_SequenceOfValue([{'{',_}|Tokens]) ->
+ parse_SequenceOfValue(Tokens,[]);
+parse_SequenceOfValue(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_SequenceOfValue(Tokens,Acc) ->
+ {Value,Rest2} = parse_Value(Tokens),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_SequenceOfValue(Rest3,[Value|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([Value|Acc]),Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end.
+
+parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {ValueSet,Rest4} = parse_ValueSet(Rest3),
+ {#valuedef{pos=L1,name=Name,type=Type,value=ValueSet},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(L1),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ValueSetTypeAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ValueSet([{'{',_}|Rest]) ->
+ {Elems,Rest2} = parse_ElementSetSpecs(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {{valueset,Elems},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+parse_ValueSet(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ValueAssignment([{identifier,L1,IdName}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Value,Rest4} = parse_Value(Rest3),
+ case lookahead_assignment(Rest4) of
+ ok ->
+ {#valuedef{pos=L1,name=IdName,type=Type,value=Value},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'::=']}})
+ end;
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'::=']}})
+ end;
+parse_ValueAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+%% SizeConstraint
+parse_SubtypeElements([{'SIZE',_}|Tokens]) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ {{'SizeConstraint',Constraint#constraint.c},Rest};
+%% PermittedAlphabet
+parse_SubtypeElements([{'FROM',_}|Tokens]) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ {{'PermittedAlphabet',Constraint#constraint.c},Rest};
+%% InnerTypeConstraints
+parse_SubtypeElements([{'WITH',_},{'COMPONENT',_}|Tokens]) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ {{'WITH COMPONENT',Constraint},Rest};
+parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_},{'...',_},{',',_}|Tokens]) ->
+ {Constraint,Rest} = parse_TypeConstraints(Tokens),
+ case Rest of
+ [{'}',_}|Rest2] ->
+ {{'WITH COMPONENTS',{'PartialSpecification',Constraint}},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'}']}})
+ end;
+parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
+ {Constraint,Rest} = parse_TypeConstraints(Tokens),
+ case Rest of
+ [{'}',_}|Rest2] ->
+ {{'WITH COMPONENTS',{'FullSpecification',Constraint}},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'}']}})
+ end;
+%% SingleValue
+%% ContainedSubtype
+%% ValueRange
+%% TypeConstraint
+parse_SubtypeElements(Tokens) ->
+ Flist = [fun parse_ContainedSubtype/1,
+ fun parse_Value/1,
+ fun([{'MIN',_}|T]) -> {'MIN',T} end,
+ fun parse_Type/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason} ->
+ throw(Reason);
+ Result = {Val,_} when record(Val,type) ->
+ Result;
+ {Lower,[{'..',_}|Rest]} ->
+ {Upper,Rest2} = parse_UpperEndpoint(Rest),
+ {{'ValueRange',{Lower,Upper}},Rest2};
+ {Lower,[{'<',_},{'..',_}|Rest]} ->
+ {Upper,Rest2} = parse_UpperEndpoint(Rest),
+ {{'ValueRange',{{gt,Lower},Upper}},Rest2};
+ {Res={'ContainedSubtype',_Type},Rest} ->
+ {Res,Rest};
+ {Value,Rest} ->
+ {{'SingleValue',Value},Rest}
+ end.
+
+parse_ContainedSubtype([{'INCLUDES',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'ContainedSubtype',Type},Rest2};
+parse_ContainedSubtype(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'INCLUDES']}}).
+%%parse_ContainedSubtype(Tokens) -> %this option is moved to parse_SubtypeElements
+%% parse_Type(Tokens).
+
+parse_UpperEndpoint([{'<',_}|Rest]) ->
+ parse_UpperEndpoint(lt,Rest);
+parse_UpperEndpoint(Tokens) ->
+ parse_UpperEndpoint(false,Tokens).
+
+parse_UpperEndpoint(Lt,Tokens) ->
+ Flist = [ fun([{'MAX',_}|T]) -> {'MAX',T} end,
+ fun parse_Value/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {Value,Rest2} when Lt == lt ->
+ {{lt,Value},Rest2};
+ {Value,Rest2} ->
+ {Value,Rest2}
+ end.
+
+parse_TypeConstraints(Tokens) ->
+ parse_TypeConstraints(Tokens,[]).
+
+parse_TypeConstraints([{identifier,_,_}|Rest],Acc) ->
+ {ComponentConstraint,Rest2} = parse_ComponentConstraint(Rest),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_TypeConstraints(Rest3,[ComponentConstraint|Acc]);
+ _ ->
+ {lists:reverse([ComponentConstraint|Acc]),Rest2}
+ end;
+parse_TypeConstraints([H|_T],_) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,identifier]}}).
+
+parse_ComponentConstraint(Tokens = [{'(',_}|_Rest]) ->
+ {ValueConstraint,Rest2} = parse_Constraint(Tokens),
+ {PresenceConstraint,Rest3} = parse_PresenceConstraint(Rest2),
+ {{ValueConstraint,PresenceConstraint},Rest3};
+parse_ComponentConstraint(Tokens) ->
+ {PresenceConstraint,Rest} = parse_PresenceConstraint(Tokens),
+ {{asn1_empty,PresenceConstraint},Rest}.
+
+parse_PresenceConstraint([{'PRESENT',_}|Rest]) ->
+ {'PRESENT',Rest};
+parse_PresenceConstraint([{'ABSENT',_}|Rest]) ->
+ {'ABSENT',Rest};
+parse_PresenceConstraint([{'OPTIONAL',_}|Rest]) ->
+ {'OPTIONAL',Rest};
+parse_PresenceConstraint(Tokens) ->
+ {asn1_empty,Tokens}.
+
+
+merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
+ {merge_constraints(Rlist,[],[]),
+ merge_constraints(ExtList,[],[])};
+
+merge_constraints(Clist) ->
+ merge_constraints(Clist, [], []).
+
+merge_constraints([Ch|Ct],Cacc, Eacc) ->
+ NewEacc = case Ch#constraint.e of
+ undefined -> Eacc;
+ E -> [E|Eacc]
+ end,
+ merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
+
+merge_constraints([],Cacc,[]) ->
+%% lists:flatten(Cacc);
+ lists:reverse(Cacc);
+merge_constraints([],Cacc,Eacc) ->
+%% lists:flatten(Cacc) ++ [{'Errors',Eacc}].
+ lists:reverse(Cacc) ++ [{'Errors',Eacc}].
+
+fixup_constraint(C) ->
+ case C of
+ {'SingleValue',SubType} when element(1,SubType) == 'ContainedSubtype' ->
+ SubType;
+ {'SingleValue',V} when list(V) ->
+ C;
+ %% [C,{'ValueRange',{lists:min(V),lists:max(V)}}];
+	    %% bug: goes wrong when an element in V is a reference to a defined value
+ {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
+ %%sort and remove duplicates
+ V2 = {'SingleValue',
+ ordsets:list_to_set(lists:flatten(V))},
+ {'PermittedAlphabet',V2};
+ {'PermittedAlphabet',{'SingleValue',V}} ->
+ V2 = {'SingleValue',[V]},
+ {'PermittedAlphabet',V2};
+ {'SizeConstraint',Sc} ->
+ {'SizeConstraint',fixup_size_constraint(Sc)};
+
+	List when list(List) -> %% In this case it may be a union or intersection
+ [fixup_constraint(Xc)||Xc <- List];
+ Other ->
+ Other
+ end.
+
+fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
+ {Lb,Ub};
+fixup_size_constraint({{'ValueRange',R},[]}) ->
+ {R,[]};
+fixup_size_constraint({[],{'ValueRange',R}}) ->
+ {[],R};
+fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
+ {R1,R2};
+fixup_size_constraint({'SingleValue',[Sv]}) ->
+ fixup_size_constraint({'SingleValue',Sv});
+fixup_size_constraint({'SingleValue',L}) when list(L) ->
+ ordsets:list_to_set(L);
+fixup_size_constraint({'SingleValue',L}) ->
+ {L,L};
+fixup_size_constraint({C1,C2}) ->
+ {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
+
+get_line({_,Pos,Token}) when integer(Pos),atom(Token) ->
+ Pos;
+get_line({Token,Pos}) when integer(Pos),atom(Token) ->
+ Pos;
+get_line(_) ->
+ undefined.
+
+get_token({_,Pos,Token}) when integer(Pos),atom(Token) ->
+ Token;
+get_token({'$end',Pos}) when integer(Pos) ->
+ undefined;
+get_token({Token,Pos}) when integer(Pos),atom(Token) ->
+ Token;
+get_token(_) ->
+ undefined.
+
+prioritize_error(ErrList) ->
+ case lists:keymember(asn1_error,1,ErrList) of
+ false -> % only asn1_assignment_error -> take the last
+ lists:last(ErrList);
+ true -> % contains errors from deeper in a Type
+ NewErrList = [_Err={_,_}|_RestErr] =
+ lists:filter(fun({asn1_error,_})->true;(_)->false end,
+ ErrList),
+ SplitErrs =
+ lists:splitwith(fun({_,X})->
+ case element(1,X) of
+ Int when integer(Int) -> true;
+ _ -> false
+ end
+ end,
+ NewErrList),
+ case SplitErrs of
+		{[],UndefPosErrs} -> % if no error with Position exists
+ lists:last(UndefPosErrs);
+ {IntPosErrs,_} ->
+ IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
+ SortedReasons = lists:keysort(1,IntPosReasons),
+ {asn1_error,lists:last(SortedReasons)}
+ end
+ end.
+
+%% most_prio_error([H={_,Reason}|T],Atom,Err) when atom(Atom) ->
+%% most_prio_error(T,element(1,Reason),H);
+%% most_prio_error([H={_,Reason}|T],Greatest,Err) ->
+%% case element(1,Reason) of
+%% Pos when integer(Pos),Pos>Greatest ->
+%% most_prio_error(
+
+
+tref2Exttref(#typereference{pos=Pos,val=Name}) ->
+ #'Externaltypereference'{pos=Pos,
+ module=get(asn1_module),
+ type=Name}.
+
+tref2Exttref(Pos,Name) ->
+ #'Externaltypereference'{pos=Pos,
+ module=get(asn1_module),
+ type=Name}.
+
+identifier2Extvalueref(#identifier{pos=Pos,val=Name}) ->
+ #'Externalvaluereference'{pos=Pos,
+ module=get(asn1_module),
+ value=Name}.
+
+%% lookahead_assignment/1 checks that the next sequence of tokens
+%% in Tokens contains a valid assignment or the
+%% 'END' token. Otherwise an exception is thrown.
+lookahead_assignment([{'END',_}|_Rest]) ->
+ ok;
+lookahead_assignment(Tokens) ->
+ parse_Assignment(Tokens),
+ ok.
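+
+%% Illustrative note (hypothetical ASN.1 fragment, not taken from any test
+%% case): given the two assignments
+%%   maxSize INTEGER ::= 10
+%%   Message ::= SEQUENCE { size INTEGER }
+%% the lookahead performed after parsing the value 10 sees "Message ::= ...",
+%% which parses as a valid assignment, so the value assignment is accepted;
+%% a stray token at that point would make the lookahead throw instead.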
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_pretty_format.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_pretty_format.erl
new file mode 100644
index 0000000000..e0abcd36ec
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_pretty_format.erl
@@ -0,0 +1,199 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_pretty_format.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+
+%% usage: asn1ct_pretty_format:term(Term) -> PNF list of characters
+%%
+%% Note: this is usually used in expressions like:
+%% io:format('~s\n',[asn1ct_pretty_format:term(Term)]).
+%%
+%% Uses the following simple heuristics
+%%
+%% 1) Simple tuples are printed across the page
+%% (Simple means *all* the elements are "flat")
+%% 2) The Complex tuple {Arg1, Arg2, Arg3,....} is printed thus:
+%% {Arg1,
+%% Arg2,
+%% Arg3,
+%% ...}
+%% 3) Lists are treated as for tuples
+%% 4) Lists of printable characters are treated as strings
+%%
+%% This method seems to work reasonably well for {Tag, ...} type
+%% data structures
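+%%
+%% A minimal illustration of the heuristics above (the layout shown is an
+%% assumption for this sketch, not output captured from a real run):
+%%
+%%   io:format("~s\n", [asn1ct_pretty_format:term({def, [{a,1},{b,2}], "name"})]).
+%%
+%% Here rule 2 applies, since the inner list of tuples is not "flat", so the
+%% outer tuple is broken over several lines, roughly:
+%%   {def,
+%%     [{a,1},
+%%      {b,2}],
+%%     "name"}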
+
+-module(asn1ct_pretty_format).
+
+-export([term/1]).
+
+-import(io_lib, [write/1, write_string/1]).
+
+term(Term) ->
+ element(2, term(Term, 0)).
+
+%%______________________________________________________________________
+%% pretty_format:term(Term, Indent) -> {Indent', Chars}
+%% Format <Term> -- use <Indent> to indent the *next* line
+%% Note: Indent' is a new indentation level (sometimes printing <Term>
+%% makes the next line need an "extra" indent!).
+
+term([], Indent) ->
+ {Indent, [$[,$]]};
+term(L, Indent) when is_list(L) ->
+ case is_string(L) of
+ true ->
+ {Indent, write_string(L)};
+ false ->
+ case complex_list(L) of
+ true ->
+ write_complex_list(L, Indent);
+ false ->
+ write_simple_list(L, Indent)
+ end
+ end;
+term(T, Indent) when is_tuple(T) ->
+ case complex_tuple(T) of
+ true ->
+ write_complex_tuple(T, Indent);
+ false ->
+ write_simple_tuple(T, Indent)
+ end;
+term(A, Indent) ->
+ {Indent, write(A)}.
+
+%%______________________________________________________________________
+%% write_simple_list([H|T], Indent) -> {Indent', Chars}
+
+write_simple_list([H|T], Indent) ->
+ {_, S1} = term(H, Indent),
+ {_, S2} = write_simple_list_tail(T, Indent),
+ {Indent, [$[,S1|S2]}.
+
+write_simple_list_tail([H|T], Indent) ->
+ {_, S1} = term(H, Indent),
+ {_, S2} = write_simple_list_tail(T, Indent),
+ {Indent, [$,,S1| S2]};
+write_simple_list_tail([], Indent) ->
+ {Indent, "]"};
+write_simple_list_tail(Other, Indent) ->
+ {_, S} = term(Other, Indent),
+ {Indent, [$|,S,$]]}.
+
+%%______________________________________________________________________
+%% write_complex_list([H|T], Indent) -> {Indent', Chars}
+
+write_complex_list([H|T], Indent) ->
+ {I1, S1} = term(H, Indent+1),
+ {_, S2} = write_complex_list_tail(T, I1),
+ {Indent, [$[,S1|S2]}.
+
+write_complex_list_tail([H|T], Indent) ->
+ {I1, S1} = term(H, Indent),
+ {_, S2} = write_complex_list_tail(T, I1),
+ {Indent, [$,,nl_indent(Indent),S1,S2]};
+write_complex_list_tail([], Indent) ->
+ {Indent, "]"};
+write_complex_list_tail(Other, Indent) ->
+ {_, S} = term(Other, Indent),
+ {Indent, [$|,S,$]]}.
+
+%%______________________________________________________________________
+%% complex_list(List) -> true | false
+%% returns true if the list is complex otherwise false
+
+complex_list([]) ->
+ false;
+complex_list([H|T]) when is_number(H); is_atom(H) ->
+ complex_list(T);
+complex_list([H|T]) ->
+ case is_string(H) of
+ true ->
+ complex_list(T);
+ false ->
+ true
+ end;
+complex_list(_) -> true.
+
+%%______________________________________________________________________
+%% complex_tuple(Tuple) -> true | false
+%% returns true if the tuple is complex otherwise false
+
+complex_tuple(T) ->
+ complex_list(tuple_to_list(T)).
+
+%%______________________________________________________________________
+%% write_simple_tuple(Tuple, Indent) -> {Indent', Chars}
+
+write_simple_tuple({}, Indent) ->
+ {Indent, "{}"};
+write_simple_tuple(Tuple, Indent) ->
+ {_, S} = write_simple_tuple_args(tuple_to_list(Tuple), Indent),
+ {Indent, [${, S, $}]}.
+
+write_simple_tuple_args([X], Indent) ->
+ term(X, Indent);
+write_simple_tuple_args([H|T], Indent) ->
+ {_, SH} = term(H, Indent),
+ {_, ST} = write_simple_tuple_args(T, Indent),
+ {Indent, [SH, $,, ST]}.
+
+%%______________________________________________________________________
+%% write_complex_tuple(Tuple, Indent) -> {Indent', Chars}
+
+write_complex_tuple(Tuple, Indent) ->
+ [H|T] = tuple_to_list(Tuple),
+ {I1, SH} = term(H, Indent+2),
+ {_, ST} = write_complex_tuple_args(T, I1),
+ {Indent, [${, SH, ST, $}]}.
+
+write_complex_tuple_args([X], Indent) ->
+ {_, S} = term(X, Indent),
+ {Indent, [$,, nl_indent(Indent), S]};
+write_complex_tuple_args([H|T], Indent) ->
+ {I1, SH} = term(H, Indent),
+ {_, ST} = write_complex_tuple_args(T, I1),
+ {Indent, [$,, nl_indent(Indent) , SH, ST]};
+write_complex_tuple_args([], Indent) ->
+ {Indent, []}.
+
+%%______________________________________________________________________
+%% utilities
+
+nl_indent(I) when I >= 0 ->
+ ["\n"|indent(I)];
+nl_indent(_) ->
+ [$\s].
+
+indent(I) when I >= 8 ->
+ [$\t|indent(I-8)];
+indent(I) when I > 0 ->
+ [$\s|indent(I-1)];
+indent(_) ->
+ [].
+
+is_string([9|T]) ->
+ is_string(T);
+is_string([10|T]) ->
+ is_string(T);
+is_string([H|T]) when H >31, H < 127 ->
+ is_string(T);
+is_string([]) ->
+ true;
+is_string(_) ->
+ false.
+
+
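+%% A minimal usage sketch (hypothetical term, not part of the original
+%% file), following the io:format/2 idiom from the header comment above:
+%%
+%%   1> io:format("~s~n", [asn1ct_pretty_format:term({foo, [1,2,3], {bar, "baz"}})]).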
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_tok.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_tok.erl
new file mode 100644
index 0000000000..3ac1b68b37
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_tok.erl
@@ -0,0 +1,351 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_tok.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_tok).
+
+%% Tokenize ASN.1 code (input to parser generated with yecc)
+
+-export([get_name/2,tokenise/2, file/1]).
+
+
+file(File) ->
+ case file:open(File, [read]) of
+ {error, Reason} ->
+ {error,{File,file:format_error(Reason)}};
+ {ok,Stream} ->
+ process0(Stream)
+ end.
+
+process0(Stream) ->
+ process(Stream,0,[]).
+
+process(Stream,Lno,R) ->
+ process(io:get_line(Stream, ''), Stream,Lno+1,R).
+
+process(eof, Stream,Lno,R) ->
+ file:close(Stream),
+ lists:flatten(lists:reverse([{'$end',Lno}|R]));
+
+
+process(L, Stream,Lno,R) when list(L) ->
+ %%io:format('read:~s',[L]),
+ case catch tokenise(L,Lno) of
+ {'ERR',Reason} ->
+ io:format("Tokeniser error on line: ~w ~w~n",[Lno,Reason]),
+ exit(0);
+ T ->
+ %%io:format('toks:~w~n',[T]),
+ process(Stream,Lno,[T|R])
+ end.
+
+
+tokenise([H|T],Lno) when $a =< H , H =< $z ->
+ {X, T1} = get_name(T, [H]),
+ [{identifier,Lno, list_to_atom(X)}|tokenise(T1,Lno)];
+
+tokenise([$&,H|T],Lno) when $A =< H , H =< $Z ->
+ {Y, T1} = get_name(T, [H]),
+ X = list_to_atom(Y),
+ [{typefieldreference, Lno, X} | tokenise(T1, Lno)];
+
+tokenise([$&,H|T],Lno) when $a =< H , H =< $z ->
+ {Y, T1} = get_name(T, [H]),
+ X = list_to_atom(Y),
+ [{valuefieldreference, Lno, X} | tokenise(T1, Lno)];
+
+tokenise([H|T],Lno) when $A =< H , H =< $Z ->
+ {Y, T1} = get_name(T, [H]),
+ X = list_to_atom(Y),
+ case reserved_word(X) of
+ true ->
+ [{X,Lno}|tokenise(T1,Lno)];
+ false ->
+ [{typereference,Lno,X}|tokenise(T1,Lno)];
+ rstrtype ->
+ [{restrictedcharacterstringtype,Lno,X}|tokenise(T1,Lno)]
+ end;
+
+tokenise([$-,H|T],Lno) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ [{number,Lno,-1 * list_to_integer(X)}|tokenise(T1,Lno)];
+
+tokenise([H|T],Lno) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ [{number,Lno,list_to_integer(X)}|tokenise(T1,Lno)];
+
+tokenise([$-,$-|T],Lno) ->
+ tokenise(skip_comment(T),Lno);
+tokenise([$:,$:,$=|T],Lno) ->
+ [{'::=',Lno}|tokenise(T,Lno)];
+
+tokenise([$'|T],Lno) ->
+ case catch collect_quoted(T,Lno,[]) of
+ {'ERR',_} ->
+ throw({'ERR','bad_quote'});
+ {Thing, T1} ->
+ [Thing|tokenise(T1,Lno)]
+ end;
+
+tokenise([$"|T],Lno) ->
+ collect_string(T,Lno);
+
+tokenise([${|T],Lno) ->
+ [{'{',Lno}|tokenise(T,Lno)];
+
+tokenise([$}|T],Lno) ->
+ [{'}',Lno}|tokenise(T,Lno)];
+
+tokenise([$]|T],Lno) ->
+ [{']',Lno}|tokenise(T,Lno)];
+
+tokenise([$[|T],Lno) ->
+ [{'[',Lno}|tokenise(T,Lno)];
+
+tokenise([$,|T],Lno) ->
+ [{',',Lno}|tokenise(T,Lno)];
+
+tokenise([$(|T],Lno) ->
+ [{'(',Lno}|tokenise(T,Lno)];
+tokenise([$)|T],Lno) ->
+ [{')',Lno}|tokenise(T,Lno)];
+
+tokenise([$.,$.,$.|T],Lno) ->
+ [{'...',Lno}|tokenise(T,Lno)];
+
+tokenise([$.,$.|T],Lno) ->
+ [{'..',Lno}|tokenise(T,Lno)];
+
+tokenise([$.|T],Lno) ->
+ [{'.',Lno}|tokenise(T,Lno)];
+tokenise([$^|T],Lno) ->
+ [{'^',Lno}|tokenise(T,Lno)];
+tokenise([$!|T],Lno) ->
+ [{'!',Lno}|tokenise(T,Lno)];
+tokenise([$||T],Lno) ->
+ [{'|',Lno}|tokenise(T,Lno)];
+
+
+tokenise([H|T],Lno) ->
+ case white_space(H) of
+ true ->
+ tokenise(T,Lno);
+ false ->
+ [{list_to_atom([H]),Lno}|tokenise(T,Lno)]
+ end;
+tokenise([],_) ->
+ [].
+
+
+collect_string(L,Lno) ->
+ collect_string(L,Lno,[]).
+
+collect_string([],_,_) ->
+ throw({'ERR','bad_quote found eof'});
+
+collect_string([H|T],Lno,Str) ->
+ case H of
+ $" ->
+ [{cstring,1,lists:reverse(Str)}|tokenise(T,Lno)];
+ Ch ->
+ collect_string(T,Lno,[Ch|Str])
+ end.
+
+
+
+% <name> is letters digits hyphens
+% a hyphen is not the last character. Hyphen hyphen is NOT allowed
+%
+% <identifier> ::= <lowercase> <name>
+
+get_name([$-,Char|T], L) ->
+ case isalnum(Char) of
+ true ->
+ get_name(T,[Char,$-|L]);
+ false ->
+ {lists:reverse(L),[$-,Char|T]}
+ end;
+get_name([$-|T], L) ->
+ {lists:reverse(L),[$-|T]};
+get_name([Char|T], L) ->
+ case isalnum(Char) of
+ true ->
+ get_name(T,[Char|L]);
+ false ->
+ {lists:reverse(L),[Char|T]}
+ end;
+get_name([], L) ->
+ {lists:reverse(L), []}.
+
+
+isalnum(H) when $A =< H , H =< $Z ->
+ true;
+isalnum(H) when $a =< H , H =< $z ->
+ true;
+isalnum(H) when $0 =< H , H =< $9 ->
+ true;
+isalnum(_) ->
+ false.
+
+isdigit(H) when $0 =< H , H =< $9 ->
+ true;
+isdigit(_) ->
+ false.
+
+white_space(9) -> true;
+white_space(10) -> true;
+white_space(13) -> true;
+white_space(32) -> true;
+white_space(_) -> false.
+
+
+get_number([H|T], L) ->
+ case isdigit(H) of
+ true ->
+ get_number(T, [H|L]);
+ false ->
+ {lists:reverse(L), [H|T]}
+ end;
+get_number([], L) ->
+ {lists:reverse(L), []}.
+
+skip_comment([]) ->
+ [];
+skip_comment([$-,$-|T]) ->
+ T;
+skip_comment([_|T]) ->
+ skip_comment(T).
+
+collect_quoted([$',$B|T],Lno, L) ->
+ case check_bin(L) of
+ true ->
+ {{bstring,Lno, lists:reverse(L)}, T};
+ false ->
+ throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ end;
+collect_quoted([$',$H|T],Lno, L) ->
+ case check_hex(L) of
+ true ->
+ {{hstring,Lno, lists:reverse(L)}, T};
+ false ->
+ throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ end;
+collect_quoted([H|T], Lno, L) ->
+ collect_quoted(T, Lno,[H|L]);
+collect_quoted([], _, _) -> % This should be allowed FIX later
+ throw({'ERR',{eol_in_token}}).
+
+check_bin([$0|T]) ->
+ check_bin(T);
+check_bin([$1|T]) ->
+ check_bin(T);
+check_bin([]) ->
+ true;
+check_bin(_) ->
+ false.
+
+check_hex([H|T]) when $0 =< H , H =< $9 ->
+ check_hex(T);
+check_hex([H|T]) when $A =< H , H =< $F ->
+ check_hex(T);
+check_hex([]) ->
+ true;
+check_hex(_) ->
+ false.
+
+
+%% reserved_word(A) -> true|false|rstrtype
+%% A = atom()
+%% returns true if A is a reserved ASN.1 word
+%% returns false if A is not a reserved word
+%% returns rstrtype if A is a reserved word in the group
+%% RestrictedCharacterStringType
+reserved_word('ABSENT') -> true;
+%reserved_word('ABSTRACT-SYNTAX') -> true; % impl as predef item
+reserved_word('ALL') -> true;
+reserved_word('ANY') -> true;
+reserved_word('APPLICATION') -> true;
+reserved_word('AUTOMATIC') -> true;
+reserved_word('BEGIN') -> true;
+reserved_word('BIT') -> true;
+reserved_word('BMPString') -> rstrtype;
+reserved_word('BOOLEAN') -> true;
+reserved_word('BY') -> true;
+reserved_word('CHARACTER') -> true;
+reserved_word('CHOICE') -> true;
+reserved_word('CLASS') -> true;
+reserved_word('COMPONENT') -> true;
+reserved_word('COMPONENTS') -> true;
+reserved_word('CONSTRAINED') -> true;
+reserved_word('DEFAULT') -> true;
+reserved_word('DEFINED') -> true;
+reserved_word('DEFINITIONS') -> true;
+reserved_word('EMBEDDED') -> true;
+reserved_word('END') -> true;
+reserved_word('ENUMERATED') -> true;
+reserved_word('EXCEPT') -> true;
+reserved_word('EXPLICIT') -> true;
+reserved_word('EXPORTS') -> true;
+reserved_word('EXTERNAL') -> true;
+reserved_word('FALSE') -> true;
+reserved_word('FROM') -> true;
+reserved_word('GeneralizedTime') -> true;
+reserved_word('GeneralString') -> rstrtype;
+reserved_word('GraphicString') -> rstrtype;
+reserved_word('IA5String') -> rstrtype;
+% reserved_word('TYPE-IDENTIFIER') -> true; % impl as predef item
+reserved_word('IDENTIFIER') -> true;
+reserved_word('IMPLICIT') -> true;
+reserved_word('IMPORTS') -> true;
+reserved_word('INCLUDES') -> true;
+reserved_word('INSTANCE') -> true;
+reserved_word('INTEGER') -> true;
+reserved_word('INTERSECTION') -> true;
+reserved_word('ISO646String') -> rstrtype;
+reserved_word('MAX') -> true;
+reserved_word('MIN') -> true;
+reserved_word('MINUS-INFINITY') -> true;
+reserved_word('NULL') -> true;
+reserved_word('NumericString') -> rstrtype;
+reserved_word('OBJECT') -> true;
+reserved_word('ObjectDescriptor') -> true;
+reserved_word('OCTET') -> true;
+reserved_word('OF') -> true;
+reserved_word('OPTIONAL') -> true;
+reserved_word('PDV') -> true;
+reserved_word('PLUS-INFINITY') -> true;
+reserved_word('PRESENT') -> true;
+reserved_word('PrintableString') -> rstrtype;
+reserved_word('PRIVATE') -> true;
+reserved_word('REAL') -> true;
+reserved_word('SEQUENCE') -> true;
+reserved_word('SET') -> true;
+reserved_word('SIZE') -> true;
+reserved_word('STRING') -> true;
+reserved_word('SYNTAX') -> true;
+reserved_word('T61String') -> rstrtype;
+reserved_word('TAGS') -> true;
+reserved_word('TeletexString') -> rstrtype;
+reserved_word('TRUE') -> true;
+reserved_word('UNION') -> true;
+reserved_word('UNIQUE') -> true;
+reserved_word('UNIVERSAL') -> true;
+reserved_word('UniversalString') -> rstrtype;
+reserved_word('UTCTime') -> true;
+reserved_word('VideotexString') -> rstrtype;
+reserved_word('VisibleString') -> rstrtype;
+reserved_word('WITH') -> true;
+reserved_word(_) -> false.
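+
+%% A small sketch of what the tokeniser yields for a hypothetical one-line
+%% input (not part of the original file): reserved words become their own
+%% token tags, while other capitalised names become typereference tokens.
+%%
+%%   1> asn1ct_tok:tokenise("Foo ::= INTEGER", 1).
+%%   [{typereference,1,'Foo'},{'::=',1},{'INTEGER',1}]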
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_value.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_value.erl
new file mode 100644
index 0000000000..9510e4b341
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1ct_value.erl
@@ -0,0 +1,330 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_value.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_value).
+
+%% Generate Erlang values for ASN.1 types.
+%% The value is randomized within its constraints
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-export([get_type/3]).
+
+
+
+%% Generate examples of values ******************************
+%%****************************************x
+
+
+get_type(M,Typename,Tellname) ->
+ case asn1_db:dbget(M,Typename) of
+ undefined ->
+ {asn1_error,{not_found,{M,Typename}}};
+ Tdef when record(Tdef,typedef) ->
+ Type = Tdef#typedef.typespec,
+ get_type(M,[Typename],Type,Tellname);
+ Err ->
+ {asn1_error,{other,Err}}
+ end.
+
+get_type(M,Typename,Type,Tellname) when record(Type,type) ->
+ InnerType = get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ get_type(Emod,Etype,Tellname);
+ {_,user} ->
+ case Tellname of
+ yes -> {Typename,get_type(M,InnerType,no)};
+ no -> get_type(M,InnerType,no)
+ end;
+ {notype,_} ->
+ true;
+ {primitive,bif} ->
+ get_type_prim(Type);
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.constraint of
+ [#'Externaltypereference'{type=TrefConstraint}] ->
+ get_type(M,TrefConstraint,no);
+ _ ->
+ "open_type"
+ end;
+ {constructed,bif} ->
+ get_type_constructed(M,Typename,InnerType,Type)
+ end;
+get_type(M,Typename,#'ComponentType'{name = Name,typespec = Type},_) ->
+ get_type(M,[Name|Typename],Type,no);
+get_type(_,_,_,_) -> % 'EXTENSIONMARK'
+ undefined.
+
+get_inner(A) when atom(A) -> A;
+get_inner(Ext) when record(Ext,'Externaltypereference') -> Ext;
+get_inner({typereference,_Pos,Name}) -> Name;
+get_inner(T) when tuple(T) ->
+ case asn1ct_gen:get_inner(T) of
+ {fixedtypevaluefield,_,Type} ->
+ Type#type.def;
+ {typefield,_FieldName} ->
+ 'ASN1_OPEN_TYPE';
+ Other ->
+ Other
+ end.
+%%get_inner(T) when tuple(T) -> element(1,T).
+
+
+
+get_type_constructed(M,Typename,InnerType,D) when record(D,type) ->
+ case InnerType of
+ 'SET' ->
+ get_sequence(M,Typename,D);
+ 'SEQUENCE' ->
+ get_sequence(M,Typename,D);
+ 'CHOICE' ->
+ get_choice(M,Typename,D);
+ 'SEQUENCE OF' ->
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ get_sequence_of(M,Typename,D,NameSuffix);
+ 'SET OF' ->
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ get_sequence_of(M,Typename,D,NameSuffix);
+ _ ->
+ exit({nyi,InnerType})
+ end.
+
+get_sequence(M,Typename,Type) ->
+ {_SEQorSET,CompList} =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> {'SEQUENCE',Cl};
+ #'SET'{components=Cl} -> {'SET',Cl}
+ end,
+ case get_components(M,Typename,CompList) of
+ [] ->
+ {list_to_atom(asn1ct_gen:list2rname(Typename))};
+ C ->
+ list_to_tuple([list_to_atom(asn1ct_gen:list2rname(Typename))|C])
+ end.
+
+get_components(M,Typename,{Root,Ext}) ->
+ get_components(M,Typename,Root++Ext);
+
+%% Should enhance this *** HERE *** with proper handling of extensions
+
+get_components(M,Typename,[H|T]) ->
+ [get_type(M,Typename,H,no)|
+ get_components(M,Typename,T)];
+get_components(_,_,[]) ->
+ [].
+
+get_choice(M,Typename,Type) ->
+ {'CHOICE',TCompList} = Type#type.def,
+ case TCompList of
+ [] ->
+ {asn1_EMPTY,asn1_EMPTY};
+ {CompList,ExtList} -> % Should be enhanced to handle extensions too
+ CList = CompList ++ ExtList,
+ C = lists:nth(random(length(CList)),CList),
+ {C#'ComponentType'.name,get_type(M,Typename,C,no)};
+ CompList when list(CompList) ->
+ C = lists:nth(random(length(CompList)),CompList),
+ {C#'ComponentType'.name,get_type(M,Typename,C,no)}
+ end.
+
+get_sequence_of(M,Typename,Type,TypeSuffix) ->
+ %% should generate length according to constraints later
+ {_,Oftype} = Type#type.def,
+ C = Type#type.constraint,
+ S = size_random(C),
+ NewTypeName = [TypeSuffix|Typename],
+ gen_list(M,NewTypeName,Oftype,no,S).
+
+gen_list(_,_,_,_,0) ->
+ [];
+gen_list(M,Typename,Oftype,Tellname,N) ->
+ [get_type(M,Typename,Oftype,no)|gen_list(M,Typename,Oftype,Tellname,N-1)].
+
+get_type_prim(D) ->
+ C = D#type.constraint,
+ case D#type.def of
+ 'INTEGER' ->
+ i_random(C);
+ {'INTEGER',NamedNumberList} ->
+ NN = [X||{X,_} <- NamedNumberList],
+ case NN of
+ [] ->
+ i_random(C);
+ _ ->
+ lists:nth(random(length(NN)),NN)
+ end;
+ Enum when tuple(Enum),element(1,Enum)=='ENUMERATED' ->
+ NamedNumberList =
+ case Enum of
+ {_,_,NNL} -> NNL;
+ {_,NNL} -> NNL
+ end,
+ NNew=
+ case NamedNumberList of
+ {N1,N2} ->
+ N1 ++ N2;
+ _->
+ NamedNumberList
+ end,
+ NN = [X||{X,_} <- NNew],
+ case NN of
+ [] ->
+ asn1_EMPTY;
+ _ ->
+ lists:nth(random(length(NN)),NN)
+ end;
+ {'BIT STRING',NamedNumberList} ->
+%% io:format("get_type_prim 1: ~w~n",[NamedNumberList]),
+ NN = [X||{X,_} <- NamedNumberList],
+ case NN of
+ [] ->
+ Bl1 =lists:reverse(adjust_list(size_random(C),[1,0,1,1])),
+ lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,Bl1));
+ _ ->
+%% io:format("get_type_prim 2: ~w~n",[NN]),
+ [lists:nth(random(length(NN)),NN)]
+ end;
+ 'ANY' ->
+ exit({asn1_error,nyi,'ANY'});
+ 'NULL' ->
+ 'NULL';
+ 'OBJECT IDENTIFIER' ->
+ Len = random(3),
+ Olist = [(random(1000)-1)||_X <-lists:seq(1,Len)],
+ list_to_tuple([random(3)-1,random(40)-1|Olist]);
+ 'ObjectDescriptor' ->
+ object_descriptor_nyi;
+ 'BOOLEAN' ->
+ true;
+ 'OCTET STRING' ->
+ adjust_list(size_random(C),c_string(C,"OCTET STRING"));
+ 'NumericString' ->
+ adjust_list(size_random(C),c_string(C,"0123456789"));
+ 'TeletexString' ->
+ adjust_list(size_random(C),c_string(C,"TeletexString"));
+ 'VideotexString' ->
+ adjust_list(size_random(C),c_string(C,"VideotexString"));
+ 'UTCTime' ->
+ "97100211-0500";
+ 'GeneralizedTime' ->
+ "19971002103130.5";
+ 'GraphicString' ->
+ adjust_list(size_random(C),c_string(C,"GraphicString"));
+ 'VisibleString' ->
+ adjust_list(size_random(C),c_string(C,"VisibleString"));
+ 'GeneralString' ->
+ adjust_list(size_random(C),c_string(C,"GeneralString"));
+ 'PrintableString' ->
+ adjust_list(size_random(C),c_string(C,"PrintableString"));
+ 'IA5String' ->
+ adjust_list(size_random(C),c_string(C,"IA5String"));
+ 'BMPString' ->
+ adjust_list(size_random(C),c_string(C,"BMPString"));
+ 'UniversalString' ->
+ adjust_list(size_random(C),c_string(C,"UniversalString"));
+ XX ->
+ exit({asn1_error,nyi,XX})
+ end.
+
+c_string(undefined,Default) ->
+ Default;
+c_string(C,Default) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} when list(Sv) ->
+ Sv;
+ {'SingleValue',V} when integer(V) ->
+ [V];
+ no ->
+ Default
+ end.
+
+random(Upper) ->
+ {A1,A2,A3} = erlang:now(),
+ random:seed(A1,A2,A3),
+ random:uniform(Upper).
+
+size_random(C) ->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ c_random({0,5},no);
+ {Lb,Ub} when Ub-Lb =< 4 ->
+ c_random({Lb,Ub},no);
+ {Lb,_} ->
+ c_random({Lb,Lb+4},no);
+ Sv ->
+ c_random(no,Sv)
+ end.
+
+i_random(C) ->
+ c_random(get_constraint(C,'ValueRange'),get_constraint(C,'SingleValue')).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% c_random(Range,SingleValue)
+%% only called from other X_random functions
+
+c_random(VRange,Single) ->
+ case {VRange,Single} of
+ {no,no} ->
+ random(16#fffffff) - (16#fffffff bsr 1);
+ {R,no} ->
+ case R of
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ Range = Ub - Lb +1,
+ Lb + (random(Range)-1);
+ {Lb,'MAX'} ->
+ Lb + random(16#fffffff)-1;
+ {'MIN',Ub} ->
+ Ub - random(16#fffffff)-1;
+ {A,{'ASN1_OK',B}} ->
+ Range = B - A +1,
+ A + (random(Range)-1)
+ end;
+ {_,S} when integer(S) ->
+ S;
+ {_,S} when list(S) ->
+ lists:nth(random(length(S)),S)
+%% {S1,S2} ->
+%% io:format("asn1ct_value: hejsan hoppsan~n");
+%% _ ->
+%% io:format("asn1ct_value: hejsan hoppsan 2~n")
+%% io:format("asn1ct_value: c_random/2: S1 = ~w~n"
+%% "S2 = ~w,~n",[S1,S2])
+%% exit(self(),goodbye)
+ end.
+
+adjust_list(Len,Orig) ->
+ adjust_list1(Len,Orig,Orig,[]).
+
+adjust_list1(0,_Orig,[_Oh|_Ot],Acc) ->
+ lists:reverse(Acc);
+adjust_list1(Len,Orig,[],Acc) ->
+ adjust_list1(Len,Orig,Orig,Acc);
+adjust_list1(Len,Orig,[Oh|Ot],Acc) ->
+ adjust_list1(Len-1,Orig,Ot,[Oh|Acc]).
+
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
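+
+%% Two behavioural sketches for the helpers above (hypothetical inputs,
+%% not part of the original file):
+%%
+%%   get_constraint([{'SizeConstraint',{1,4}}], 'SizeConstraint') -> {1,4}
+%%   get_constraint([{'SizeConstraint',{1,4}}], 'ValueRange') -> no
+%%   adjust_list(7, "abc") -> "abcabca"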
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt.erl
new file mode 100644
index 0000000000..1d73927052
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt.erl
@@ -0,0 +1,69 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt).
+
+%% Runtime functions for ASN.1 (i.e encode, decode)
+
+-export([encode/2,encode/3,decode/3,load_driver/0,unload_driver/0,info/1]).
+
+encode(Module,{Type,Term}) ->
+ encode(Module,Type,Term).
+
+encode(Module,Type,Term) ->
+ case catch apply(Module,encode,[Type,Term]) of
+ {'EXIT',undef} ->
+ {error,{asn1,{undef,Module,Type}}};
+ Result ->
+ Result
+ end.
+
+decode(Module,Type,Bytes) ->
+ case catch apply(Module,decode,[Type,Bytes]) of
+ {'EXIT',undef} ->
+ {error,{asn1,{undef,Module,Type}}};
+ Result ->
+ Result
+ end.
+
+load_driver() ->
+ asn1rt_driver_handler:load_driver(),
+ receive
+ driver_ready ->
+ ok;
+ Err={error,_Reason} ->
+ Err;
+ Error ->
+ {error,Error}
+ end.
+
+unload_driver() ->
+ case catch asn1rt_driver_handler:unload_driver() of
+ ok ->
+ ok;
+ Error ->
+ {error,Error}
+ end.
+
+
+info(Module) ->
+ case catch apply(Module,info,[]) of
+ {'EXIT',{undef,_Reason}} ->
+ {error,{asn1,{undef,Module,info}}};
+ Result ->
+ {ok,Result}
+ end.
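+
+%% A hedged usage sketch: 'MyProtocol' and 'Message' stand for a module and
+%% type produced by the ASN.1 compiler; they are hypothetical and not part
+%% of this test data.
+%%
+%%   asn1rt:encode('MyProtocol', 'Message', Msg),
+%%   asn1rt:decode('MyProtocol', 'Message', Bytes).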
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin.erl
new file mode 100644
index 0000000000..4f4574513e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin.erl
@@ -0,0 +1,2310 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_ber_bin.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt_ber_bin).
+
+%% encoding / decoding of BER
+
+-export([decode/1]).
+-export([fixoptionals/2,split_list/2,cindex/3,restbytes2/3,
+ list_to_record/2,
+ encode_tag_val/1,decode_tag/1,peek_tag/1,
+ check_tags/3, encode_tags/3]).
+-export([encode_boolean/2,decode_boolean/3,
+ encode_integer/3,encode_integer/4,
+ decode_integer/4,decode_integer/5,encode_enumerated/2,
+ encode_enumerated/4,decode_enumerated/5,
+ encode_real/2,decode_real/4,
+ encode_bit_string/4,decode_bit_string/6,
+ decode_compact_bit_string/6,
+ encode_octet_string/3,decode_octet_string/5,
+ encode_null/2,decode_null/3,
+ encode_object_identifier/2,decode_object_identifier/3,
+ encode_restricted_string/4,decode_restricted_string/6,
+ encode_universal_string/3,decode_universal_string/5,
+ encode_BMP_string/3,decode_BMP_string/5,
+ encode_generalized_time/3,decode_generalized_time/5,
+ encode_utc_time/3,decode_utc_time/5,
+ encode_length/1,decode_length/1,
+ check_if_valid_tag/3,
+ decode_tag_and_length/1, decode_components/6,
+ decode_components/7, decode_set/6]).
+
+-export([encode_open_type/1,encode_open_type/2,decode_open_type/1,decode_open_type/2,decode_open_type/3]).
+-export([skipvalue/1, skipvalue/2]).
+
+-include("asn1_records.hrl").
+
+% the encoding of class of tag bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+%%% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+%%% The tag-number for universal types
+-define(N_BOOLEAN, 1).
+-define(N_INTEGER, 2).
+-define(N_BIT_STRING, 3).
+-define(N_OCTET_STRING, 4).
+-define(N_NULL, 5).
+-define(N_OBJECT_IDENTIFIER, 6).
+-define(N_OBJECT_DESCRIPTOR, 7).
+-define(N_EXTERNAL, 8).
+-define(N_REAL, 9).
+-define(N_ENUMERATED, 10).
+-define(N_EMBEDDED_PDV, 11).
+-define(N_SEQUENCE, 16).
+-define(N_SET, 17).
+-define(N_NumericString, 18).
+-define(N_PrintableString, 19).
+-define(N_TeletexString, 20).
+-define(N_VideotexString, 21).
+-define(N_IA5String, 22).
+-define(N_UTCTime, 23).
+-define(N_GeneralizedTime, 24).
+-define(N_GraphicString, 25).
+-define(N_VisibleString, 26).
+-define(N_GeneralString, 27).
+-define(N_UniversalString, 28).
+-define(N_BMPString, 30).
+
+
+% the complete tag-word of built-in types
+-define(T_BOOLEAN, ?UNIVERSAL bor ?PRIMITIVE bor 1).
+-define(T_INTEGER, ?UNIVERSAL bor ?PRIMITIVE bor 2).
+-define(T_BIT_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 3). % can be CONSTRUCTED
+-define(T_OCTET_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 4). % can be CONSTRUCTED
+-define(T_NULL, ?UNIVERSAL bor ?PRIMITIVE bor 5).
+-define(T_OBJECT_IDENTIFIER,?UNIVERSAL bor ?PRIMITIVE bor 6).
+-define(T_OBJECT_DESCRIPTOR,?UNIVERSAL bor ?PRIMITIVE bor 7).
+-define(T_EXTERNAL, ?UNIVERSAL bor ?PRIMITIVE bor 8).
+-define(T_REAL, ?UNIVERSAL bor ?PRIMITIVE bor 9).
+-define(T_ENUMERATED, ?UNIVERSAL bor ?PRIMITIVE bor 10).
+-define(T_EMBEDDED_PDV, ?UNIVERSAL bor ?PRIMITIVE bor 11).
+-define(T_SEQUENCE, ?UNIVERSAL bor ?CONSTRUCTED bor 16).
+-define(T_SET, ?UNIVERSAL bor ?CONSTRUCTED bor 17).
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_UTCTime, ?UNIVERSAL bor ?PRIMITIVE bor 23).
+-define(T_GeneralizedTime, ?UNIVERSAL bor ?PRIMITIVE bor 24).
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+-define(T_UniversalString, ?UNIVERSAL bor ?PRIMITIVE bor 28). %can be constructed
+-define(T_BMPString, ?UNIVERSAL bor ?PRIMITIVE bor 30). %can be constructed
+
+
+decode(Bin) ->
+ decode_primitive(Bin).
+
+decode_primitive(Bin) ->
+ {Tlv = {Tag,Len,V},<<>>} = decode_tlv(Bin),
+ case element(2,Tag) of
+ ?CONSTRUCTED ->
+ {Tag,Len,decode_constructed(V)};
+ _ ->
+ Tlv
+ end.
+
+decode_constructed(<<>>) ->
+ [];
+decode_constructed(Bin) ->
+ {Tlv = {Tag,Len,V},Rest} = decode_tlv(Bin),
+ NewTlv =
+ case element(2,Tag) of
+ ?CONSTRUCTED ->
+ {Tag,Len,decode_constructed(V)};
+ _ ->
+ Tlv
+ end,
+ [NewTlv|decode_constructed(Rest)].
+
+decode_tlv(Bin) ->
+ {Tag,Bin1,_Rb1} = decode_tag(Bin),
+ {{Len,Bin2},_Rb2} = decode_length(Bin1),
+ <<V:Len/binary,Bin3/binary>> = Bin2,
+ {{Tag,Len,V},Bin3}.
+
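+%% A worked example of the generic TLV decoder above, for the BER encoding
+%% of BOOLEAN TRUE (tag 1, length 1, value 16#FF), relying on the exported
+%% decode_length/1 following the short definite-length form:
+%%
+%%   decode(<<1,1,16#FF>>) -> {{0,0,1},1,<<16#FF>>}
+%%
+%% i.e. {Tag = {Class,Form,TagNo}, Length, Value} for a universal,
+%% primitive tag number 1.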
+
+
+%%%%%%%%%%%%%
+% split_list(List,HeadLen) -> {HeadList,TailList}
+%
+% splits List into HeadList (Length=HeadLen) and TailList
+% if HeadLen == indefinite -> return {List,indefinite}
+split_list(List,indefinite) ->
+ {List, indefinite};
+split_list(Bin, Len) when binary(Bin) ->
+ split_binary(Bin,Len);
+split_list(List,Len) ->
+ {lists:sublist(List,Len),lists:nthtail(Len,List)}.
+
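+%% For example (hypothetical inputs): split_list(<<1,2,3,4>>, 2) gives
+%% {<<1,2>>, <<3,4>>}, while split_list(Bytes, indefinite) simply returns
+%% {Bytes, indefinite} so the caller can look for the end-of-contents octets.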
+
+%%% new function which fixes a bug regarding indefinite length decoding
+restbytes2(indefinite,<<0,0,RemBytes/binary>>,_) ->
+ {RemBytes,2};
+restbytes2(indefinite,RemBytes,ext) ->
+ skipvalue(indefinite,RemBytes);
+restbytes2(RemBytes,<<>>,_) ->
+ {RemBytes,0};
+restbytes2(_RemBytes,Bytes,noext) ->
+ exit({error,{asn1, {unexpected,Bytes}}});
+restbytes2(RemBytes,_Bytes,ext) ->
+ {RemBytes,0}.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% skipvalue(Length, Bytes) -> {RemainingBytes, RemovedNumberOfBytes}
+%%
+%% skips the one complete (could be nested) TLV from Bytes
+%% handles both definite and indefinite length encodings
+%%
+
+skipvalue(L, Bytes) ->
+ skipvalue(L, Bytes, 0).
+
+skipvalue(indefinite, Bytes, Rb) ->
+ {_T,Bytes2,R2} = decode_tag(Bytes),
+ {{L,Bytes3},R3} = decode_length(Bytes2),
+ {Bytes4,Rb4} = case L of
+ indefinite ->
+ skipvalue(indefinite,Bytes3,R2+R3);
+ _ ->
+ <<_:L/binary, RestBytes/binary>> = Bytes3,
+ {RestBytes, R2+R3+L}
+ end,
+ case Bytes4 of
+ <<0,0,Bytes5/binary>> ->
+ {Bytes5,Rb+Rb4+2};
+ _ -> skipvalue(indefinite,Bytes4,Rb+Rb4)
+ end;
+skipvalue(L, Bytes, Rb) ->
+% <<Skip:L/binary, RestBytes/binary>> = Bytes,
+ <<_:L/binary, RestBytes/binary>> = Bytes,
+ {RestBytes,Rb+L}.
+
+%%skipvalue(indefinite, Bytes, Rb) ->
+%% {T,Bytes2,R2} = decode_tag(Bytes),
+%% {L,Bytes3,R3} = decode_length(Bytes2),
+%% {Bytes4,Rb4} = case L of
+%% indefinite ->
+%% skipvalue(indefinite,Bytes3,R2+R3);
+%% _ ->
+%% lists:nthtail(L,Bytes3) %% strange !?
+%% end,
+%% case Bytes4 of
+%% [0,0|Bytes5] ->
+%% {Bytes5,Rb4+2};
+%% _ -> skipvalue(indefinite,Bytes4,Rb4)
+%% end;
+%%skipvalue(L, Bytes, Rb) ->
+%% {lists:nthtail(L,Bytes),Rb+L}.
+
+skipvalue(Bytes) ->
+ {_T,Bytes2,R2} = decode_tag(Bytes),
+ {{L,Bytes3},R3} = decode_length(Bytes2),
+ skipvalue(L,Bytes3,R2+R3).
+
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Optionals: preset not-filled optionals with asn1_NOVALUE
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,_Acc1,Acc2) ->
+ % return Val as a record
+ list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]).
+
+
+%%encode_tag(TagClass(?UNI, APP etc), Form (?PRIM etx), TagInteger) ->
+%% 8bit Int | binary
+encode_tag_val({Class, Form, TagNo}) when (TagNo =< 30) ->
+ <<(Class bsr 6):2,(Form bsr 5):1,TagNo:5>>;
+
+encode_tag_val({Class, Form, TagNo}) ->
+ {Octets,_Len} = mk_object_val(TagNo),
+ BinOct = list_to_binary(Octets),
+ <<(Class bsr 6):2, (Form bsr 5):1, 31:5,BinOct/binary>>;
+
+%% assumes a whole correct tag bitpattern, a multiple of 8
+encode_tag_val(Tag) when (Tag =< 255) -> Tag; %% is this function ever used??!!
+%% assumes a correct bitpattern of 0-5
+encode_tag_val(Tag) -> encode_tag_val2(Tag,[]).
+
+encode_tag_val2(Tag, OctAck) when (Tag =< 255) ->
+ [Tag | OctAck];
+encode_tag_val2(Tag, OctAck) ->
+ encode_tag_val2(Tag bsr 8, [255 band Tag | OctAck]).
+
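+%% Two concrete identifier-octet encodings from the clauses above, using the
+%% class/form macros defined at the top of this module:
+%%
+%%   encode_tag_val({?UNIVERSAL, ?PRIMITIVE, 2}) -> <<2>>     (the INTEGER tag)
+%%   encode_tag_val({?CONTEXT, ?CONSTRUCTED, 3}) -> <<16#A3>> (a [3] constructed tag)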
+
+%%%encode_tag(TagClass(?UNI, APP etc), Form (?PRIM etx), TagInteger) ->
+%%% 8bit Int | [list of octets]
+%encode_tag_val({Class, Form, TagNo}) when (TagNo =< 30) ->
+%%% <<Class:2,Form:1,TagNo:5>>;
+% [Class bor Form bor TagNo];
+%encode_tag_val({Class, Form, TagNo}) ->
+% {Octets,L} = mk_object_val(TagNo),
+% [Class bor Form bor 31 | Octets];
+
+
+%%============================================================================
+%% Peek on the initial tag
+%% peek_tag(Bytes) -> TagBytes
+%% interprets the first byte and possibly the second, third and fourth bytes
+%% as a tag and returns all the bytes comprising the tag; the
+%% constructed/primitive bit (6th bit of the first byte) is normalised to 0
+%%
+
+peek_tag(<<B7_6:2,_:1,31:5,Buffer/binary>>) ->
+ Bin = peek_tag(Buffer, <<>>),
+ <<B7_6:2,31:6,Bin/binary>>;
+%% single tag (tagno < 31)
+peek_tag(<<B7_6:2,_:1,B4_0:5,_Buffer/binary>>) ->
+ <<B7_6:2,B4_0:6>>.
+
+peek_tag(<<0:1,PartialTag:7,_Buffer/binary>>, TagAck) ->
+ <<TagAck/binary,PartialTag>>;
+peek_tag(<<PartialTag,Buffer/binary>>, TagAck) ->
+ peek_tag(Buffer,<<TagAck/binary,PartialTag>>);
+peek_tag(_,TagAck) ->
+ exit({error,{asn1, {invalid_tag,TagAck}}}).
+%%peek_tag([Tag|Buffer]) when (Tag band 31) == 31 ->
+%% [Tag band 2#11011111 | peek_tag(Buffer,[])];
+%%%% single tag (tagno < 31)
+%%peek_tag([Tag|Buffer]) ->
+%% [Tag band 2#11011111].
+
+%%peek_tag([PartialTag|Buffer], TagAck) when (PartialTag < 128 ) ->
+%% lists:reverse([PartialTag|TagAck]);
+%%peek_tag([PartialTag|Buffer], TagAck) ->
+%% peek_tag(Buffer,[PartialTag|TagAck]);
+%%peek_tag(Buffer,TagAck) ->
+%% exit({error,{asn1, {invalid_tag,lists:reverse(TagAck)}}}).
+
+
+%%===============================================================================
+%% Decode a tag
+%%
+%% decode_tag(OctetListBuffer) -> {{Class, Form, TagNo}, RestOfBuffer, RemovedBytes}
+%%===============================================================================
+
+%% multiple octet tag
+decode_tag(<<Class:2, Form:1, 31:5, Buffer/binary>>) ->
+ {TagNo, Buffer1, RemovedBytes} = decode_tag(Buffer, 0, 1),
+ {{(Class bsl 6), (Form bsl 5), TagNo}, Buffer1, RemovedBytes};
+
+%% single tag (< 31 tags)
+decode_tag(<<Class:2,Form:1,TagNo:5, Buffer/binary>>) ->
+ {{(Class bsl 6), (Form bsl 5), TagNo}, Buffer, 1}.
+
+%% last partial tag
+decode_tag(<<0:1,PartialTag:7, Buffer/binary>>, TagAck, RemovedBytes) ->
+ TagNo = (TagAck bsl 7) bor PartialTag,
+ %%<<TagNo>> = <<TagAck:1, PartialTag:7>>,
+ {TagNo, Buffer, RemovedBytes+1};
+% more tags
+decode_tag(<<_:1,PartialTag:7, Buffer/binary>>, TagAck, RemovedBytes) ->
+ TagAck1 = (TagAck bsl 7) bor PartialTag,
+ %%<<TagAck1:16>> = <<TagAck:1, PartialTag:7,0:8>>,
+ decode_tag(Buffer, TagAck1, RemovedBytes+1).
+
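+%% The inverse of the encoding example above: the identifier octet 16#A3
+%% decodes to the context-specific, constructed tag number 3, consuming one
+%% byte:
+%%
+%%   decode_tag(<<16#A3>>) -> {{?CONTEXT, ?CONSTRUCTED, 3}, <<>>, 1}
+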
+%%------------------------------------------------------------------
+%% check_tags_i is the same as check_tags except that it stops and
+%% returns the remaining tags not checked when it encounters an
+%% indefinite length field
+%% only called internally within this module
+
+check_tags_i([Tag], Buffer, OptOrMand) -> % optimized very usual case
+ {[],check_one_tag(Tag, Buffer, OptOrMand)};
+check_tags_i(Tags, Buffer, OptOrMand) ->
+ check_tags_i(Tags, Buffer, 0, OptOrMand).
+
+check_tags_i([Tag1,Tag2|TagRest], Buffer, Rb, OptOrMand)
+ when Tag1#tag.type == 'IMPLICIT' ->
+ check_tags_i([Tag1#tag{type=Tag2#tag.type}|TagRest], Buffer, Rb, OptOrMand);
+
+check_tags_i([Tag1|TagRest], Buffer, Rb, OptOrMand) ->
+ {Form_Length,Buffer2,Rb1} = check_one_tag(Tag1, Buffer, OptOrMand),
+ case TagRest of
+ [] -> {TagRest, {Form_Length, Buffer2, Rb + Rb1}};
+ _ ->
+ case Form_Length of
+ {?CONSTRUCTED,_} ->
+ {TagRest, {Form_Length, Buffer2, Rb + Rb1}};
+ _ ->
+ check_tags_i(TagRest, Buffer2, Rb + Rb1, mandatory)
+ end
+ end;
+
+check_tags_i([], Buffer, Rb, _) ->
+ {[],{{0,0},Buffer,Rb}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This function is called from generated code
+
+check_tags([Tag], Buffer, OptOrMand) -> % optimized very usual case
+ check_one_tag(Tag, Buffer, OptOrMand);
+check_tags(Tags, Buffer, OptOrMand) ->
+ check_tags(Tags, Buffer, 0, OptOrMand).
+
+check_tags([Tag1,Tag2|TagRest], Buffer, Rb, OptOrMand)
+ when Tag1#tag.type == 'IMPLICIT' ->
+ check_tags([Tag1#tag{type=Tag2#tag.type}|TagRest], Buffer, Rb, OptOrMand);
+
+check_tags([Tag1|TagRest], Buffer, Rb, OptOrMand) ->
+ {Form_Length,Buffer2,Rb1} = check_one_tag(Tag1, Buffer, OptOrMand),
+ case TagRest of
+ [] -> {Form_Length, Buffer2, Rb + Rb1};
+ _ -> check_tags(TagRest, Buffer2, Rb + Rb1, mandatory)
+ end;
+
+check_tags([], Buffer, Rb, _) ->
+ {{0,0},Buffer,Rb}.
+
+check_one_tag(Tag=#tag{class=ExpectedClass,number=ExpectedNumber}, Buffer, OptOrMand) ->
+ case catch decode_tag(Buffer) of
+ {'EXIT',_Reason} ->
+ tag_error(no_data,Tag,Buffer,OptOrMand);
+ {{ExpectedClass,Form,ExpectedNumber},Buffer2,Rb} ->
+ {{L,Buffer3},RemBytes2} = decode_length(Buffer2),
+ {{Form,L}, Buffer3, RemBytes2+Rb};
+ {ErrorTag,_,_} ->
+ tag_error(ErrorTag, Tag, Buffer, OptOrMand)
+ end.
+
+tag_error(ErrorTag, Tag, Buffer, OptOrMand) ->
+ case OptOrMand of
+ mandatory ->
+ exit({error,{asn1, {invalid_tag,
+ {ErrorTag, Tag, Buffer}}}});
+ _ ->
+ exit({error,{asn1, {no_optional_tag,
+ {ErrorTag, Tag, Buffer}}}})
+ end.
+%%=======================================================================
+%%
+%% Encode all tags in the list Tags and return a possibly deep list of
+%% bytes with tag and length encoded
+%%
+%% prepend_tags(Tags, BytesSoFar, LenSoFar) -> {Bytes, Len}
+encode_tags(Tags, BytesSoFar, LenSoFar) ->
+ NewTags = encode_tags1(Tags, []),
+ %% NewTags contains the resulting tags in reverse order
+ encode_tags2(NewTags, BytesSoFar, LenSoFar).
+
+%encode_tags2([#tag{class=?UNIVERSAL,number=No}|Trest], BytesSoFar, LenSoFar) ->
+% {Bytes2,L2} = encode_length(LenSoFar),
+% encode_tags2(Trest,[[No|Bytes2],BytesSoFar], LenSoFar + 1 + L2);
+encode_tags2([Tag|Trest], BytesSoFar, LenSoFar) ->
+ {Bytes1,L1} = encode_one_tag(Tag),
+ {Bytes2,L2} = encode_length(LenSoFar),
+ encode_tags2(Trest, [Bytes1,Bytes2|BytesSoFar],
+ LenSoFar + L1 + L2);
+encode_tags2([], BytesSoFar, LenSoFar) ->
+ {BytesSoFar,LenSoFar}.
+
+encode_tags1([Tag1, Tag2| Trest], Acc)
+ when Tag1#tag.type == 'IMPLICIT' ->
+ encode_tags1([Tag1#tag{type=Tag2#tag.type,form=Tag2#tag.form}|Trest],Acc);
+encode_tags1([Tag1 | Trest], Acc) ->
+ encode_tags1(Trest, [Tag1|Acc]);
+encode_tags1([], Acc) ->
+ Acc. % the resulting tags are returned in reverse order
+
+encode_one_tag(Bin) when binary(Bin) ->
+ {Bin,size(Bin)};
+encode_one_tag(#tag{class=Class,number=No,type=Type, form = Form}) ->
+ NewForm = case Type of
+ 'EXPLICIT' ->
+ ?CONSTRUCTED;
+ _ ->
+ Form
+ end,
+ Bytes = encode_tag_val({Class,NewForm,No}),
+ {Bytes,size(Bytes)}.
+
+%%===============================================================================
+%% Change the tag (used when an implicit tagged type has a reference to something else)
+%% The constructed bit in the tag is taken from the tag to be replaced.
+%%
+%% change_tag(NewTag,[Tag,Buffer]) -> [NewTag,Buffer]
+%%===============================================================================
+
+%change_tag({NewClass,NewTagNr}, Buffer) ->
+% {{OldClass, OldForm, OldTagNo}, Buffer1, RemovedBytes} = decode_tag(lists:flatten(Buffer)),
+% [encode_tag_val({NewClass, OldForm, NewTagNr}) | Buffer1].
+
+
+
+
+
+
+
+%%===============================================================================
+%%
+%% This comment is valid for all the encode/decode functions
+%%
+%% C = Constraint -> typically {'ValueRange',LowerBound,UpperBound}
+%% used for PER-coding but not for BER-coding.
+%%
+%% Val = Value. If Val is an atom then it is a symbolic integer value
+%% (i.e the atom must be one of the names in the NamedNumberList).
+%% The NamedNumberList is used to translate the atom to an integer value
+%% before encoding.
+%%
+%%===============================================================================
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+
+%% This version does not consider Explicit tagging of the open type. It
+%% is only left because of backward compatibility.
+encode_open_type(Val) when list(Val) ->
+ {Val,size(list_to_binary(Val))};
+encode_open_type(Val) ->
+ {Val, size(Val)}.
+
+%%
+encode_open_type(Val, []) when list(Val) ->
+ {Val,size(list_to_binary(Val))};
+encode_open_type(Val,[]) ->
+ {Val, size(Val)};
+encode_open_type(Val, Tag) when list(Val) ->
+ encode_tags(Tag,Val,size(list_to_binary(Val)));
+encode_open_type(Val,Tag) ->
+ encode_tags(Tag,Val, size(Val)).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer) -> Value
+%% Bytes = [byte] with BER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes) ->
+ {_Tag, Len, _RemainingBuffer, RemovedBytes} = decode_tag_and_length(Bytes),
+ N = Len + RemovedBytes,
+ <<Val:N/binary, RemainingBytes/binary>> = Bytes,
+ {Val, RemainingBytes, Len + RemovedBytes}.
+
+decode_open_type(Bytes,ExplTag) ->
+ {Tag, Len, RemainingBuffer, RemovedBytes} = decode_tag_and_length(Bytes),
+ case {Tag,ExplTag} of
+ {{Class,Form,No},[#tag{class=Class,number=No,form=Form}]} ->
+ {_Tag2, Len2, _RemainingBuffer2, RemovedBytes2} = decode_tag_and_length(RemainingBuffer),
+ N = Len2 + RemovedBytes2,
+ <<_:RemovedBytes/unit:8,Val:N/binary,RemainingBytes/binary>> = Bytes,
+ {Val, RemainingBytes, N + RemovedBytes};
+ _ ->
+ N = Len + RemovedBytes,
+ <<Val:N/binary, RemainingBytes/binary>> = Bytes,
+ {Val, RemainingBytes, Len + RemovedBytes}
+ end.
+
+decode_open_type(ber_bin,Bytes,ExplTag) ->
+ decode_open_type(Bytes,ExplTag);
+decode_open_type(ber,Bytes,ExplTag) ->
+ {Val,RemBytes,Len}=decode_open_type(Bytes,ExplTag),
+ {binary_to_list(Val),RemBytes,Len}.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Boolean, ITU_T X.690 Chapter 8.2
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode_boolean(Integer, tag | notag) -> [octet list]
+%%===============================================================================
+
+encode_boolean({Name, Val}, DoTag) when atom(Name) ->
+ dotag(DoTag, ?N_BOOLEAN, encode_boolean(Val));
+encode_boolean(true,[]) ->
+ {[1,1,16#FF],3};
+encode_boolean(false,[]) ->
+ {[1,1,0],3};
+encode_boolean(Val, DoTag) ->
+ dotag(DoTag, ?N_BOOLEAN, encode_boolean(Val)).
+
+%% encode_boolean(Boolean) -> [Len, Boolean] = [1, $FF | 0]
+encode_boolean(true) -> {[16#FF],1};
+encode_boolean(false) -> {[0],1};
+encode_boolean(X) -> exit({error,{asn1, {encode_boolean, X}}}).
+
+
+%%===============================================================================
+%% decode_boolean(BuffList, HasTag, TotalLen) -> {true, Remain, RemovedBytes} |
+%% {false, Remain, RemovedBytes}
+%%===============================================================================
+
+decode_boolean(Buffer, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_BOOLEAN}),
+ decode_boolean_notag(Buffer, NewTags, OptOrMand).
+
+decode_boolean_notag(Buffer, Tags, OptOrMand) ->
+ {RestTags, {FormLen,Buffer0,Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val,Buffer1,Rb1} = decode_boolean_notag(Buffer00, RestTags, OptOrMand),
+ {Buffer2, Rb2} = restbytes2(RestBytes,Buffer1,noext),
+ {Val, Buffer2, Rb0+Rb1+Rb2};
+ {_,_} ->
+ decode_boolean2(Buffer0, Rb0)
+ end.
+
+decode_boolean2(<<0:8, Buffer/binary>>, RemovedBytes) ->
+ {false, Buffer, RemovedBytes + 1};
+decode_boolean2(<<_:8, Buffer/binary>>, RemovedBytes) ->
+ {true, Buffer, RemovedBytes + 1};
+decode_boolean2(Buffer, _) ->
+ exit({error,{asn1, {decode_boolean, Buffer}}}).
+
+
+
+
+%%===========================================================================
+%% Integer, ITU_T X.690 Chapter 8.3
+
+%% encode_integer(Constraint, Value, Tag) -> [octet list]
+%% encode_integer(Constraint, Name, NamedNumberList, Tag) -> [octet list]
+%% Value = INTEGER | {Name,INTEGER}
+%% Tag = tag | notag
+%%===========================================================================
+
+encode_integer(C, Val, []) when integer(Val) ->
+ {EncVal,Len}=encode_integer(C, Val),
+ dotag_universal(?N_INTEGER,EncVal,Len);
+encode_integer(C, Val, Tag) when integer(Val) ->
+ dotag(Tag, ?N_INTEGER, encode_integer(C, Val));
+encode_integer(C,{Name,Val},Tag) when atom(Name) ->
+ encode_integer(C,Val,Tag);
+encode_integer(_, Val, _) ->
+ exit({error,{asn1, {encode_integer, Val}}}).
+
+
+
+encode_integer(C, Val, NamedNumberList, Tag) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value,{_, NewVal}} ->
+ dotag(Tag, ?N_INTEGER, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {encode_integer_namednumber, Val}}})
+ end;
+encode_integer(C,{_,Val},NamedNumberList,Tag) ->
+ encode_integer(C,Val,NamedNumberList,Tag);
+encode_integer(C, Val, _NamedNumberList, Tag) ->
+ dotag(Tag, ?N_INTEGER, encode_integer(C, Val)).
+
+
+
+
+encode_integer(_C, Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, L=[B|_Acc]) when B < 128 ->
+ L;
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, L=[B1|_T]) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
+
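+%% encode_integer/2 above produces the shortest two's-complement octet list;
+%% the first argument (the constraint) is ignored by this clause, so the
+%% hypothetical placeholder atom no_constraint is used here:
+%%
+%%   encode_integer(no_constraint, 256)  -> {[1,0], 2}
+%%   encode_integer(no_constraint, -129) -> {[255,127], 2}
+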
+%%===============================================================================
+%% decode integer
+%% (Buffer, Range, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%===============================================================================
+
+
+decode_integer(Buffer, Range, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_INTEGER}),
+ decode_integer_notag(Buffer, Range, [], NewTags, OptOrMand).
+
+decode_integer(Buffer, Range, NamedNumberList, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_INTEGER}),
+ decode_integer_notag(Buffer, Range, NamedNumberList, NewTags, OptOrMand).
+
+decode_integer_notag(Buffer, Range, NamedNumberList, NewTags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(NewTags, Buffer, OptOrMand),
+% Result = {Val, Buffer2, RemovedBytes} =
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00, RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_integer_notag(Buffer00, Range, NamedNumberList,
+ RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_, Len} ->
+ Result =
+ decode_integer2(Len,Buffer0,Rb0+Len),
+ Result2 = check_integer_constraint(Result,Range),
+ resolve_named_value(Result2,NamedNumberList)
+ end.
+
+resolve_named_value(Result={Val,Buffer,RemBytes},NamedNumberList) ->
+ case NamedNumberList of
+ [] -> Result;
+ _ ->
+ NewVal = case lists:keysearch(Val, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ Val
+ end,
+ {NewVal, Buffer, RemBytes}
+ end.
+
+check_integer_constraint(Result={Val, _Buffer,_},Range) ->
+ case Range of
+ [] -> % No length constraint
+ Result;
+ {Lb,Ub} when Val >= Lb, Ub >= Val -> % variable length constraint
+ Result;
+ Val -> % fixed value constraint
+ Result;
+ {_,_} ->
+ exit({error,{asn1,{integer_range,Range,Val}}});
+ SingleValue when integer(SingleValue) ->
+ exit({error,{asn1,{integer_range,Range,Val}}});
+ _ -> % some strange constraint that we don't support yet
+ Result
+ end.
+
+%%============================================================================
+%% Enumerated value, ITU_T X.690 Chapter 8.4
+
+%% encode enumerated value
+%%============================================================================
+encode_enumerated(Val, []) when integer(Val)->
+ {EncVal,Len} = encode_integer(false,Val),
+ dotag_universal(?N_ENUMERATED,EncVal,Len);
+encode_enumerated(Val, DoTag) when integer(Val)->
+ dotag(DoTag, ?N_ENUMERATED, encode_integer(false,Val));
+encode_enumerated({Name,Val}, DoTag) when atom(Name) ->
+ encode_enumerated(Val, DoTag).
+
+%% The encode_enumerated functions below this line can be removed when the
+%% new code generation is stable. (the functions might have to be kept here
+%% a while longer for compatibility reasons)
+
+encode_enumerated(C, Val, {NamedNumberList,ExtList}, DoTag) when atom(Val) ->
+ case catch encode_enumerated(C, Val, NamedNumberList, DoTag) of
+ {'EXIT',_} -> encode_enumerated(C, Val, ExtList, DoTag);
+ Result -> Result
+ end;
+
+encode_enumerated(C, Val, NamedNumberList, DoTag) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value, {_, NewVal}} when DoTag == []->
+ {EncVal,Len} = encode_integer(C,NewVal),
+ dotag_universal(?N_ENUMERATED,EncVal,Len);
+ {value, {_, NewVal}} ->
+ dotag(DoTag, ?N_ENUMERATED, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {enumerated_not_in_range, Val}}})
+ end;
+
+encode_enumerated(C, {asn1_enum, Val}, {_,_}, DoTag) when integer(Val) ->
+ dotag(DoTag, ?N_ENUMERATED, encode_integer(C,Val));
+
+encode_enumerated(C, {Name,Val}, NamedNumberList, DoTag) when atom(Name) ->
+ encode_enumerated(C, Val, NamedNumberList, DoTag);
+
+encode_enumerated(_, Val, _, _) ->
+ exit({error,{asn1, {enumerated_not_namednumber, Val}}}).
+
+
+
+%%============================================================================
+%% decode enumerated value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) ->
+%% {Value, RemainingBuffer, RemovedBytes}
+%%===========================================================================
+decode_enumerated(Buffer, Range, NamedNumberList, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_ENUMERATED}),
+ decode_enumerated_notag(Buffer, Range, NamedNumberList,
+ NewTags, OptOrMand).
+
+decode_enumerated_notag(Buffer, Range, NNList = {NamedNumberList,ExtList}, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_enumerated_notag(Buffer00, Range, NNList, RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ {Val01, Buffer01, Rb01} =
+ decode_integer2(Len, Buffer0, Rb0+Len),
+ case decode_enumerated1(Val01, NamedNumberList) of
+ {asn1_enum,Val01} ->
+ {decode_enumerated1(Val01,ExtList), Buffer01, Rb01};
+ Result01 ->
+ {Result01, Buffer01, Rb01}
+ end
+ end;
+
+decode_enumerated_notag(Buffer, Range, NNList, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_enumerated_notag(Buffer00, Range, NNList, RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ {Val01, Buffer02, Rb02} =
+ decode_integer2(Len, Buffer0, Rb0+Len),
+ case decode_enumerated1(Val01, NNList) of
+ {asn1_enum,_} ->
+ exit({error,{asn1, {illegal_enumerated, Val01}}});
+ Result01 ->
+ {Result01, Buffer02, Rb02}
+ end
+ end.
+
+decode_enumerated1(Val, NamedNumberList) ->
+ %% it must be a named integer
+ case lists:keysearch(Val, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ {asn1_enum,Val}
+ end.
+
+
+%%============================================================================
+%%
+%% Real value, ITU_T X.690 Chapter 8.5
+%%============================================================================
+%%
+%% encode real value
+%%============================================================================
+
+%% only base 2 internally so far!!
+encode_real(0, DoTag) ->
+ dotag(DoTag, ?N_REAL, {[],0});
+encode_real('PLUS-INFINITY', DoTag) ->
+ dotag(DoTag, ?N_REAL, {[64],1});
+encode_real('MINUS-INFINITY', DoTag) ->
+ dotag(DoTag, ?N_REAL, {[65],1});
+encode_real(Val, DoTag) when tuple(Val)->
+ dotag(DoTag, ?N_REAL, encode_real(Val)).
+
+%%%%%%%%%%%%%%
+%% not optimally efficient..
+%% only base 2 of Mantissa encoding!
+%% only base 2 of ExpBase encoding!
+encode_real({Man, Base, Exp}) ->
+%% io:format("Mantissa: ~w Base: ~w, Exp: ~w~n",[Man, Base, Exp]),
+
+ OctExp = if Exp >= 0 -> list_to_binary(encode_integer_pos(Exp, []));
+ true -> list_to_binary(encode_integer_neg(Exp, []))
+ end,
+%% ok = io:format("OctExp: ~w~n",[OctExp]),
+ SignBit = if Man > 0 -> 0; % bit 7 is pos or neg, no Zeroval
+ true -> 1
+ end,
+%% ok = io:format("SignBitMask: ~w~n",[SignBitMask]),
+ InBase = if Base =:= 2 -> 0; % bit 6,5: only base 2 this far!
+ true ->
+ exit({error,{asn1, {encode_real_non_supported_encodeing, Base}}})
+ end,
+ SFactor = 0, % bit 4,3: no scaling since only base 2
+ OctExpLen = size(OctExp),
+ if OctExpLen > 255 ->
+ exit({error,{asn1, {to_big_exp_in_encode_real, OctExpLen}}});
+ true -> true %% make real assert later..
+ end,
+ {LenCode, EOctets} = case OctExpLen of % bit 2,1
+ 1 -> {0, OctExp};
+ 2 -> {1, OctExp};
+ 3 -> {2, OctExp};
+ _ -> {3, <<OctExpLen, OctExp/binary>>}
+ end,
+ FirstOctet = <<1:1,SignBit:1,InBase:2,SFactor:2,LenCode:2>>,
+ OctMantissa = if Man > 0 -> list_to_binary(minimum_octets(Man));
+ true -> list_to_binary(minimum_octets(-(Man))) % signbit keeps track of sign
+ end,
+ %% ok = io:format("LenMask: ~w EOctets: ~w~nFirstOctet: ~w OctMantissa: ~w OctExpLen: ~w~n", [LenMask, EOctets, FirstOctet, OctMantissa, OctExpLen]),
+ Bin = <<FirstOctet/binary, EOctets/binary, OctMantissa/binary>>,
+ {Bin, size(Bin)}.
+
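+%% A worked sketch of the layout above, assuming minimum_octets/1 (called
+%% above and defined elsewhere in this module) returns the shortest unsigned
+%% octet list of the mantissa: the value 3 * 2^1 gets a first octet with the
+%% binary-encoding bit set, one exponent octet and one mantissa octet.
+%%
+%%   encode_real({3, 2, 1}) -> {<<16#80,1,3>>, 3}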
+
+%encode_real({Man, Base, Exp}) ->
+%% io:format("Mantissa: ~w Base: ~w, Exp: ~w~n",[Man, Base, Exp]),
+
+% OctExp = if Exp >= 0 -> encode_integer_pos(Exp, []);
+% true -> encode_integer_neg(Exp, [])
+% end,
+%% ok = io:format("OctExp: ~w~n",[OctExp]),
+% SignBitMask = if Man > 0 -> 2#00000000; % bit 7 is pos or neg, no Zeroval
+% true -> 2#01000000
+% end,
+%% ok = io:format("SignBitMask: ~w~n",[SignBitMask]),
+% InternalBaseMask = if Base =:= 2 -> 2#00000000; % bit 6,5: only base 2 this far!
+% true ->
+% exit({error,{asn1, {encode_real_non_supported_encodeing, Base}}})
+% end,
+% ScalingFactorMask =2#00000000, % bit 4,3: no scaling since only base 2
+% OctExpLen = length(OctExp),
+% if OctExpLen > 255 ->
+% exit({error,{asn1, {to_big_exp_in_encode_real, OctExpLen}}});
+% true -> true %% make real assert later..
+% end,
+% {LenMask, EOctets} = case OctExpLen of % bit 2,1
+% 1 -> {0, OctExp};
+% 2 -> {1, OctExp};
+% 3 -> {2, OctExp};
+% _ -> {3, [OctExpLen, OctExp]}
+% end,
+% FirstOctet = (SignBitMask bor InternalBaseMask bor
+% ScalingFactorMask bor LenMask bor
+% 2#10000000), % bit set for binary mantissa encoding!
+% OctMantissa = if Man > 0 -> minimum_octets(Man);
+% true -> minimum_octets(-(Man)) % signbit keeps track of sign
+% end,
+%% ok = io:format("LenMask: ~w EOctets: ~w~nFirstOctet: ~w OctMantissa: ~w OctExpLen: ~w~n", [LenMask, EOctets, FirstOctet, OctMantissa, OctExpLen]),
+% {[FirstOctet, EOctets, OctMantissa],
+% length(OctMantissa) +
+% (if OctExpLen > 3 ->
+% OctExpLen + 2;
+% true ->
+% OctExpLen + 1
+% end)
+% }.
+
+
+%%============================================================================
+%% decode real value
+%%
+%% decode_real([OctetBufferList], tuple|value, tag|notag) ->
+%% {{Mantissa, Base, Exp} | realval | PLUS-INFINITY | MINUS-INFINITY | 0,
+%% RestBuff}
+%%
+%% only for base 2 decoding so far!!
+%%============================================================================
+
+decode_real(Buffer, Form, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_REAL}),
+ decode_real_notag(Buffer, Form, NewTags, OptOrMand).
+
+decode_real_notag(Buffer, Form, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_real_notag(Buffer00, Form, RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ decode_real2(Buffer0, Form, Len, Rb0)
+ end.
+
+decode_real2(Buffer0, Form, Len, RemBytes1) ->
+ <<First, Buffer2/binary>> = Buffer0,
+ if
+ First =:= 2#01000000 -> {'PLUS-INFINITY', Buffer2};
+ First =:= 2#01000001 -> {'MINUS-INFINITY', Buffer2};
+ First =:= 2#00000000 -> {0, Buffer2};
+ true ->
+ %% have some check here to verify only supported bases (2)
+ <<_B7:1,B6:1,B5_4:2,B3_2:2,B1_0:2>> = <<First>>,
+ Sign = B6,
+ Base =
+ case B5_4 of
+ 0 -> 2; % base 2, only one so far
+ _ -> exit({error,{asn1, {non_supported_base, First}}})
+ end,
+% ScalingFactor =
+ case B3_2 of
+ 0 -> 0; % no scaling so far
+ _ -> exit({error,{asn1, {non_supported_scaling, First}}})
+ end,
+ % ok = io:format("Buffer2: ~w~n",[Buffer2]),
+ {FirstLen, {Exp, Buffer3}, RemBytes2} =
+ case B1_0 of
+ 0 -> {2, decode_integer2(1, Buffer2, RemBytes1), RemBytes1+1};
+ 1 -> {3, decode_integer2(2, Buffer2, RemBytes1), RemBytes1+2};
+ 2 -> {4, decode_integer2(3, Buffer2, RemBytes1), RemBytes1+3};
+ 3 ->
+ <<ExpLen1,RestBuffer/binary>> = Buffer2,
+ { ExpLen1 + 2,
+ decode_integer2(ExpLen1, RestBuffer, RemBytes1),
+ RemBytes1+ExpLen1}
+ end,
+ % io:format("FirstLen: ~w, Exp: ~w, Buffer3: ~w ~n",
+ % [FirstLen, Exp, Buffer3]),
+ Length = Len - FirstLen,
+ <<LongInt:Length/unit:8,RestBuff/binary>> = Buffer3,
+ {{Mantissa, Buffer4}, RemBytes3} =
+ if Sign =:= 0 ->
+ % io:format("sign plus~n"),
+ {{LongInt, RestBuff}, 1 + Length};
+ true ->
+ % io:format("sign minus~n"),
+ {{-LongInt, RestBuff}, 1 + Length}
+ end,
+ % io:format("Form: ~w~n",[Form]),
+ case Form of
+ tuple ->
+ {Val,Buf,_RemB} = Exp,
+ {{Mantissa, Base, {Val,Buf}}, Buffer4, RemBytes2+RemBytes3};
+ _value ->
+ comming
+ end
+ end.
+
+
+%%============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.6
+%%
+%% encode bitstring value
+%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only the named identifiers are set to one;
+%%   the Constraint must then carry some information about the
+%%   bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bitlist
+%% C is the constraint Len, only valid when Val is a list of identifiers
+%%============================================================================
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList,DoTag) when integer(Unused), binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList,DoTag);
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList, DoTag) when atom(FirstVal) ->
+ encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, DoTag);
+
+encode_bit_string(C, [{bit,X} | RestVal], NamedBitList, DoTag) ->
+ encode_bit_string_named(C, [{bit,X} | RestVal], NamedBitList, DoTag);
+
+encode_bit_string(C, [FirstVal| RestVal], NamedBitList, DoTag) when integer(FirstVal) ->
+ encode_bit_string_bits(C, [FirstVal | RestVal], NamedBitList, DoTag);
+
+encode_bit_string(_, 0, _, []) ->
+ {[?N_BIT_STRING,1,0],3};
+
+encode_bit_string(_, 0, _, DoTag) ->
+ dotag(DoTag, ?N_BIT_STRING, {<<0>>,1});
+
+encode_bit_string(_, [], _, []) ->
+ {[?N_BIT_STRING,1,0],3};
+
+encode_bit_string(_, [], _, DoTag) ->
+ dotag(DoTag, ?N_BIT_STRING, {<<0>>,1});
+
+encode_bit_string(C, IntegerVal, NamedBitList, DoTag) when integer(IntegerVal) ->
+ BitListVal = int_to_bitlist(IntegerVal),
+ encode_bit_string_bits(C, BitListVal, NamedBitList, DoTag);
+
+encode_bit_string(C, {Name,BitList}, NamedBitList, DoTag) when atom(Name) ->
+ encode_bit_string(C, BitList, NamedBitList, DoTag).
+
+
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
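+
+%% Example (illustrative only): the bits come out least significant first,
+%% e.g. int_to_bitlist(5) -> [1,0,1] and int_to_bitlist(0) -> [].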
+
+
+%%=================================================================
+%% Encode BIT STRING of the form {Unused,BinBits}.
+%% Unused is the number of unused bits in the last byte in BinBits
+%% and BinBits is a binary representing the BIT STRING.
+%%=================================================================
+encode_bin_bit_string(C,{Unused,BinBits},_NamedBitList,DoTag)->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ remove_unused_then_dotag(DoTag,?N_BIT_STRING,Unused,BinBits);
+ {_Min,Max} ->
+ BBLen = (size(BinBits)*8)-Unused,
+ if
+ BBLen > Max ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBLen},{maximum,Max}}}}});
+ true ->
+ remove_unused_then_dotag(DoTag,?N_BIT_STRING,
+ Unused,BinBits)
+ end;
+ Size ->
+ case ((size(BinBits)*8)-Unused) of
+ BBSize when BBSize =< Size ->
+ remove_unused_then_dotag(DoTag,?N_BIT_STRING,
+ Unused,BinBits);
+ BBSize ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBSize},{should_be,Size}}}}})
+ end
+ end.
+
+remove_unused_then_dotag(DoTag,StringType,Unused,BinBits) ->
+ case Unused of
+ 0 when (size(BinBits) == 0),DoTag==[] ->
+ %% time optimization of next case
+ {[StringType,1,0],3};
+ 0 when (size(BinBits) == 0) ->
+ dotag(DoTag,StringType,{<<0>>,1});
+ 0 when DoTag==[]-> % time optimization of next case
+ dotag_universal(StringType,[Unused|BinBits],size(BinBits)+1);
+%	    {LenEnc,Len} = encode_length(size(BinBits)+1),
+% {[StringType,LenEnc,[Unused|BinBits]],size(BinBits)+1+Len+1};
+ 0 ->
+ dotag(DoTag,StringType,<<Unused,BinBits/binary>>);
+ Num when DoTag == [] -> % time optimization of next case
+ N = (size(BinBits)-1),
+ <<BBits:N/binary,LastByte>> = BinBits,
+ dotag_universal(StringType,
+ [Unused,BBits,(LastByte bsr Num) bsl Num],
+ size(BinBits)+1);
+%	    {LenEnc,Len} = encode_length(size(BinBits)+1),
+% {[StringType,LenEnc,[Unused,BBits,(LastByte bsr Num) bsl Num],
+% 1+Len+size(BinBits)+1};
+ Num ->
+ N = (size(BinBits)-1),
+ <<BBits:N/binary,LastByte>> = BinBits,
+ dotag(DoTag,StringType,{[Unused,binary_to_list(BBits) ++
+ [(LastByte bsr Num) bsl Num]],
+ 1+size(BinBits)})
+ end.
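+
+%% Example (illustrative only, assuming the universal BIT STRING tag 3 and
+%% no size constraint): the compact form {0,<<2#10110000>>} should give
+%%   encode_bit_string([], {0,<<2#10110000>>}, [], []) ->
+%%       {[3,2,[0|<<2#10110000>>]],4}
+%% i.e. the octets 3 (tag), 2 (length), 0 (unused bits), 2#10110000 (bits)
+%% once the iolist is flattened.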
+
+
+%%=================================================================
+%% Encode named bits
+%%=================================================================
+
+encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, DoTag) ->
+ {Len,Unused,OctetList} =
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal],
+ NamedBitList, []),
+ BitList = make_and_set_list(lists:max(ToSetPos)+1,
+ ToSetPos, 0),
+ encode_bitstring(BitList);
+ {_Min,Max} ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal],
+ NamedBitList, []),
+ BitList = make_and_set_list(Max, ToSetPos, 0),
+ encode_bitstring(BitList);
+ Size ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal],
+ NamedBitList, []),
+ BitList = make_and_set_list(Size, ToSetPos, 0),
+ encode_bitstring(BitList)
+ end,
+ case DoTag of
+ [] ->
+ dotag_universal(?N_BIT_STRING,[Unused|OctetList],Len+1);
+% {EncLen,LenLen} = encode_length(Len+1),
+% {[?N_BIT_STRING,EncLen,Unused,OctetList],1+LenLen+Len+1};
+ _ ->
+ dotag(DoTag, ?N_BIT_STRING, {[Unused|OctetList],Len+1})
+ end.
+
+
+%%----------------------------------------
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+%%----------------------------------------
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+get_all_bitposes([Val | Rest], NamedBitList, Ack) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+
+%%----------------------------------------
+%% make_and_set_list(Len of list to return, [list of positions to set to 1])->
+%% returns a list of length Len with the positions given in SetPos set to 1.
+%% Positions are counted from 0, i.e. the first element is position 0, the
+%% second position 1 etc.; the resulting list has length Len, not Len + 1.
+%% BitList = make_and_set_list(C, ToSetPos, 0),
+%%----------------------------------------
+
+make_and_set_list(0, [], _) -> [];
+make_and_set_list(0, _, _) ->
+ exit({error,{asn1,bitstring_sizeconstraint}});
+make_and_set_list(Len, [XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(Len - 1, SetPos, XPos + 1)];
+make_and_set_list(Len, [Pos|SetPos], XPos) ->
+ [0 | make_and_set_list(Len - 1, [Pos | SetPos], XPos + 1)];
+make_and_set_list(Len, [], XPos) ->
+ [0 | make_and_set_list(Len - 1, [], XPos + 1)].
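+
+%% Example (illustrative only):
+%%   make_and_set_list(4, [1,3], 0) -> [0,1,0,1]
+%% i.e. positions 1 and 3 (counted from 0) are set in a list of length 4.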
+
+
+
+
+
+
+%%=================================================================
+%% Encode bit string for lists of ones and zeroes
+%%=================================================================
+encode_bit_string_bits(C, BitListVal, _NamedBitList, DoTag) when list(BitListVal) ->
+ {Len,Unused,OctetList} =
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ encode_bitstring(BitListVal);
+ Constr={Min,Max} when integer(Min),integer(Max) ->
+ encode_constr_bit_str_bits(Constr,BitListVal,DoTag);
+ {Constr={_,_},[]} ->
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,DoTag);
+ Constr={{_,_},{_,_}} ->%{{Min1,Max1},{Min2,Max2}}
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,DoTag);
+ Size ->
+ case length(BitListVal) of
+ BitSize when BitSize == Size ->
+ encode_bitstring(BitListVal);
+ BitSize when BitSize < Size ->
+ PaddedList =
+ pad_bit_list(Size-BitSize,BitListVal),
+ encode_bitstring(PaddedList);
+ BitSize ->
+ exit({error,
+ {asn1,
+ {bitstring_length,
+ {{was,BitSize},
+ {should_be,Size}}}}})
+ end
+ end,
+    %% add unused byte to the Len
+ case DoTag of
+ [] ->
+ dotag_universal(?N_BIT_STRING,[Unused|OctetList],Len+1);
+% {EncLen,LenLen}=encode_length(Len+1),
+% {[?N_BIT_STRING,EncLen,Unused|OctetList],1+LenLen+Len+1};
+ _ ->
+ dotag(DoTag, ?N_BIT_STRING,
+ {[Unused | OctetList],Len+1})
+ end.
+
+
+encode_constr_bit_str_bits({_Min,Max},BitListVal,_DoTag) ->
+ BitLen = length(BitListVal),
+ if
+ BitLen > Max ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max}}}}});
+ true ->
+ encode_bitstring(BitListVal)
+ end;
+encode_constr_bit_str_bits({{_Min1,Max1},{Min2,Max2}},BitListVal,_DoTag) ->
+ BitLen = length(BitListVal),
+ case BitLen of
+ Len when Len > Max2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max2}}}}});
+ Len when Len > Max1, Len < Min2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {not_allowed_interval,
+ Max1,Min2}}}}});
+ _ ->
+ encode_bitstring(BitListVal)
+ end.
+
+%% returns a list of length Size + length(BitListVal), with BitListVal
+%% as the most significant elements followed by padded zero elements
+pad_bit_list(Size,BitListVal) ->
+ Tail = lists:duplicate(Size,0),
+ lists:append(BitListVal,Tail).
+
+%%=================================================================
+%% Do the actual encoding
+%% ([bitlist]) -> {ListLen, UnusedBits, OctetList}
+%%=================================================================
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest]) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Val], 1);
+encode_bitstring(Val) ->
+ {Unused, Octet} = unused_bitlist(Val, 7, 0),
+ {1, Unused, [Octet]}.
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest], Ack, Len) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Ack | [Val]], Len + 1);
+%% even multiple of 8 bits..
+encode_bitstring([], Ack, Len) ->
+ {Len, 0, Ack};
+%% unused bits in last octet
+encode_bitstring(Rest, Ack, Len) ->
+% io:format("uneven ~w ~w ~w~n",[Rest, Ack, Len]),
+ {Unused, Val} = unused_bitlist(Rest, 7, 0),
+ {Len + 1, Unused, [Ack | [Val]]}.
+
+%%%%%%%%%%%%%%%%%%
+%% unused_bitlist([list of ones and zeros <= 7], 7, []) ->
+%% {Unused bits, Last octet with bits moved to right}
+unused_bitlist([], Trail, Ack) ->
+ {Trail + 1, Ack};
+unused_bitlist([Bit | Rest], Trail, Ack) ->
+%% io:format("trail Bit: ~w Rest: ~w Trail: ~w Ack:~w~n",[Bit, Rest, Trail, Ack]),
+ unused_bitlist(Rest, Trail - 1, (Bit bsl Trail) bor Ack).
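+
+%% Example (illustrative only): a bit list that is not a multiple of 8 bits
+%% is left-aligned in the last octet, e.g.
+%%   encode_bitstring([1,0,1,1]) -> {1,4,[2#10110000]}
+%% (one octet, 4 unused bits).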
+
+
+%%============================================================================
+%% decode bitstring value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%============================================================================
+
+decode_compact_bit_string(Buffer, Range, NamedNumberList, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags, LenIn,
+ NamedNumberList, OptOrMand,bin).
+
+decode_bit_string(Buffer, Range, NamedNumberList, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags, LenIn,
+ NamedNumberList, OptOrMand,old).
+
+
+decode_bit_string2(1,<<0 ,Buffer/binary>>,_NamedNumberList,RemovedBytes,BinOrOld) ->
+ case BinOrOld of
+ bin ->
+ {{0,<<>>},Buffer,RemovedBytes};
+ _ ->
+ {[], Buffer, RemovedBytes}
+ end;
+decode_bit_string2(Len,<<Unused,Buffer/binary>>,NamedNumberList,
+ RemovedBytes,BinOrOld) ->
+ L = Len - 1,
+ <<Bits:L/binary,BufferTail/binary>> = Buffer,
+ case NamedNumberList of
+ [] ->
+ case BinOrOld of
+ bin ->
+ {{Unused,Bits},BufferTail,RemovedBytes};
+ _ ->
+ BitString = decode_bitstring2(L, Unused, Buffer),
+ {BitString,BufferTail, RemovedBytes}
+ end;
+ _ ->
+ BitString = decode_bitstring2(L, Unused, Buffer),
+ {decode_bitstring_NNL(BitString,NamedNumberList),
+ BufferTail,
+ RemovedBytes}
+ end.
+
+%%----------------------------------------
+%% Decode the in buffer to bits
+%%----------------------------------------
+decode_bitstring2(1,Unused,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,_/binary>>) ->
+ lists:sublist([B7,B6,B5,B4,B3,B2,B1,B0],8-Unused);
+decode_bitstring2(Len, Unused,
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Buffer/binary>>) ->
+ [B7, B6, B5, B4, B3, B2, B1, B0 |
+ decode_bitstring2(Len - 1, Unused, Buffer)].
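+
+%% Example (illustrative only): with 2 unused bits in a single octet,
+%%   decode_bitstring2(1, 2, <<2#10100000>>) -> [1,0,1,0,0,0]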
+
+%%decode_bitstring2(1, Unused, Buffer) ->
+%% make_bits_of_int(hd(Buffer), 128, 8-Unused);
+%%decode_bitstring2(Len, Unused, [BitVal | Buffer]) ->
+%% [B7, B6, B5, B4, B3, B2, B1, B0] = make_bits_of_int(BitVal, 128, 8),
+%% [B7, B6, B5, B4, B3, B2, B1, B0 |
+%% decode_bitstring2(Len - 1, Unused, Buffer)].
+
+
+%%make_bits_of_int(_, _, 0) ->
+%% [];
+%%make_bits_of_int(BitVal, MaskVal, Unused) when Unused > 0 ->
+%% X = case MaskVal band BitVal of
+%% 0 -> 0 ;
+%% _ -> 1
+%% end,
+%% [X | make_bits_of_int(BitVal, MaskVal bsr 1, Unused - 1)].
+
+
+
+%%----------------------------------------
+%% Decode the bitlist to names
+%%----------------------------------------
+
+
+decode_bitstring_NNL(BitList,NamedNumberList) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,0,[]).
+
+
+decode_bitstring_NNL([],_,_No,Result) ->
+ lists:reverse(Result);
+
+decode_bitstring_NNL([B|BitList],[{Name,No}|NamedNumberList],No,Result) ->
+ if
+ B == 0 ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result);
+ true ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[Name|Result])
+ end;
+decode_bitstring_NNL([1|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[{bit,No}|Result]);
+decode_bitstring_NNL([0|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result).
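+
+%% Example (illustrative only): given the named bit list [{a,0},{b,1},{c,2}],
+%%   decode_bitstring_NNL([1,0,1], [{a,0},{b,1},{c,2}]) -> [a,c]
+%% while set bits without a corresponding name come back as {bit,Pos}.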
+
+
+%%============================================================================
+%% Octet string, ITU_T X.690 Chapter 8.7
+%%
+%% encode octet string
+%% The OctetList must be a flat list of integers in the range 0..255
+%% the function does not check this because it takes too much time
+%%============================================================================
+encode_octet_string(_C, OctetList, []) when binary(OctetList) ->
+ dotag_universal(?N_OCTET_STRING,OctetList,size(OctetList));
+encode_octet_string(_C, OctetList, DoTag) when binary(OctetList) ->
+ dotag(DoTag, ?N_OCTET_STRING, {OctetList,size(OctetList)});
+encode_octet_string(_C, OctetList, DoTag) when list(OctetList) ->
+ case length(OctetList) of
+ Len when DoTag == [] ->
+ dotag_universal(?N_OCTET_STRING,OctetList,Len);
+ Len ->
+ dotag(DoTag, ?N_OCTET_STRING, {OctetList,Len})
+ end;
+% encode_octet_string(C, OctetList, DoTag) when list(OctetList) ->
+% dotag(DoTag, ?N_OCTET_STRING, {OctetList,length(OctetList)});
+encode_octet_string(C, {Name,OctetList}, DoTag) when atom(Name) ->
+ encode_octet_string(C, OctetList, DoTag).
+
+
+%%============================================================================
+%% decode octet string
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%
+%% Octet string is decoded as a restricted string
+%%============================================================================
+decode_octet_string(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_OCTET_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_OCTET_STRING,
+ Tags, TotalLen, [], OptOrMand,old).
+
+%%============================================================================
+%% Null value, ITU_T X.690 Chapter 8.8
+%%
+%% encode NULL value
+%%============================================================================
+
+encode_null(_, []) ->
+ {[?N_NULL,0],2};
+encode_null(_, DoTag) ->
+ dotag(DoTag, ?N_NULL, {[],0}).
+
+%%============================================================================
+%% decode NULL value
+%% (Buffer, HasTag, TotalLen) -> {NULL, Remain, RemovedBytes}
+%%============================================================================
+decode_null(Buffer, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_NULL}),
+ decode_null_notag(Buffer, NewTags, OptOrMand).
+
+decode_null_notag(Buffer, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {_Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} = decode_null_notag(Buffer0, RestTags,
+ OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,0} ->
+ {'NULL', Buffer0, Rb0};
+ {_,Len} ->
+ exit({error,{asn1,{invalid_length,'NULL',Len}}})
+ end.
+
+
+%%============================================================================
+%% Object identifier, ITU_T X.690 Chapter 8.19
+%%
+%% encode Object Identifier value
+%%============================================================================
+
+encode_object_identifier({Name,Val}, DoTag) when atom(Name) ->
+ encode_object_identifier(Val, DoTag);
+encode_object_identifier(Val, []) ->
+ {EncVal,Len} = e_object_identifier(Val),
+ dotag_universal(?N_OBJECT_IDENTIFIER,EncVal,Len);
+encode_object_identifier(Val, DoTag) ->
+ dotag(DoTag, ?N_OBJECT_IDENTIFIER, e_object_identifier(Val)).
+
+e_object_identifier({'OBJECT IDENTIFIER', V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname, V}) when atom(Cname), tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname, V}) when atom(Cname), list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%%%%%%%%%%%%%%%
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+e_object_identifier([E1, E2 | Tail]) ->
+ Head = 40*E1 + E2, % wow!
+ {H,Lh} = mk_object_val(Head),
+ {R,Lr} = enc_obj_id_tail(Tail, [], 0),
+ {[H|R], Lh+Lr}.
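+
+%% Example (illustrative only): the first two arcs are folded into one
+%% subidentifier, so for the OID {1,2,840} the head becomes 40*1 + 2 = 42
+%% and 840 is encoded in base-128 form by mk_object_val/1 below:
+%%   e_object_identifier([1,2,840]) -> {[[42],[16#86,16#48]],3}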
+
+enc_obj_id_tail([], Ack, Len) ->
+ {lists:reverse(Ack), Len};
+enc_obj_id_tail([H|T], Ack, Len) ->
+ {B, L} = mk_object_val(H),
+ enc_obj_id_tail(T, [B|Ack], Len+L).
+
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+%%e_object_identifier([E1, E2 | Tail]) ->
+%% Head = 40*E1 + E2, % wow!
+%% F = fun(Val, AckLen) ->
+%% {L, Ack} = mk_object_val(Val),
+%% {L, Ack + AckLen}
+%% end,
+%% {Octets, Len} = lists:mapfoldl(F, 0, [Head | Tail]).
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets, where the 8th bit is always set to one
+%% except in the last octet, where it is 0
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
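+
+%% Example (illustrative only): 840 needs two septets (6 and 72), so
+%%   mk_object_val(840) -> {[16#86,16#48],2}
+%% where the first octet has the continuation bit (16#80) set.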
+
+
+
+%%============================================================================
+%% decode Object Identifier value
+%% (Buffer, HasTag, TotalLen) -> {{ObjId}, Remain, RemovedBytes}
+%%============================================================================
+
+decode_object_identifier(Buffer, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,
+ number=?N_OBJECT_IDENTIFIER}),
+ decode_object_identifier_notag(Buffer, NewTags, OptOrMand).
+
+decode_object_identifier_notag(Buffer, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_object_identifier_notag(Buffer00,
+ RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ {[AddedObjVal|ObjVals],Buffer01} =
+ dec_subidentifiers(Buffer0,0,[],Len),
+ {Val1, Val2} = if
+ AddedObjVal < 40 ->
+ {0, AddedObjVal};
+ AddedObjVal < 80 ->
+ {1, AddedObjVal - 40};
+ true ->
+ {2, AddedObjVal - 80}
+ end,
+ {list_to_tuple([Val1, Val2 | ObjVals]), Buffer01,
+ Rb0+Len}
+ end.
+
+dec_subidentifiers(Buffer,_Av,Al,0) ->
+ {lists:reverse(Al),Buffer};
+dec_subidentifiers(<<1:1,H:7,T/binary>>,Av,Al,Len) ->
+ dec_subidentifiers(T,(Av bsl 7) + H,Al,Len-1);
+dec_subidentifiers(<<H,T/binary>>,Av,Al,Len) ->
+ dec_subidentifiers(T,0,[((Av bsl 7) + H)|Al],Len-1).
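+
+%% Example (illustrative only): decoding the two octets produced above,
+%%   dec_subidentifiers(<<16#86,16#48>>, 0, [], 2) -> {[840],<<>>}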
+
+
+%%dec_subidentifiers(Buffer,Av,Al,0) ->
+%% {lists:reverse(Al),Buffer};
+%%dec_subidentifiers([H|T],Av,Al,Len) when H >=16#80 ->
+%% dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al,Len-1);
+%%dec_subidentifiers([H|T],Av,Al,Len) ->
+%% dec_subidentifiers(T,0,[(Av bsl 7) + H |Al],Len-1).
+
+
+%%============================================================================
+%% Restricted character string types, ITU_T X.690 Chapter 8.20
+%%
+%% encode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%%============================================================================
+encode_restricted_string(_C, OctetList, StringType, [])
+ when binary(OctetList) ->
+ dotag_universal(StringType,OctetList,size(OctetList));
+encode_restricted_string(_C, OctetList, StringType, DoTag)
+ when binary(OctetList) ->
+ dotag(DoTag, StringType, {OctetList, size(OctetList)});
+encode_restricted_string(_C, OctetList, StringType, [])
+ when list(OctetList) ->
+ dotag_universal(StringType,OctetList,length(OctetList));
+encode_restricted_string(_C, OctetList, StringType, DoTag)
+ when list(OctetList) ->
+ dotag(DoTag, StringType, {OctetList, length(OctetList)});
+encode_restricted_string(C,{Name,OctetL},StringType,DoTag) when atom(Name)->
+ encode_restricted_string(C, OctetL, StringType, DoTag).
+
+%%============================================================================
+%% decode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_restricted_string(Buffer, Range, StringType, Tags, LenIn, OptOrMand) ->
+ {Val,Buffer2,Rb} =
+ decode_restricted_string_tag(Buffer, Range, StringType, Tags,
+ LenIn, [], OptOrMand,old),
+ {check_and_convert_restricted_string(Val,StringType,Range,[],old),
+ Buffer2,Rb}.
+
+
+decode_restricted_string(Buffer, Range, StringType, Tags, LenIn, NNList, OptOrMand, BinOrOld ) ->
+ {Val,Buffer2,Rb} =
+ decode_restricted_string_tag(Buffer, Range, StringType, Tags,
+ LenIn, NNList, OptOrMand, BinOrOld),
+ {check_and_convert_restricted_string(Val,StringType,Range,NNList,BinOrOld),
+ Buffer2,Rb}.
+
+decode_restricted_string_tag(Buffer, Range, StringType, TagsIn, LenIn, NNList, OptOrMand, BinOrOld ) ->
+ NewTags = new_tags(TagsIn, #tag{class=?UNIVERSAL,number=StringType}),
+ decode_restricted_string_notag(Buffer, Range, StringType, NewTags,
+ LenIn, NNList, OptOrMand, BinOrOld).
+
+
+
+
+check_and_convert_restricted_string(Val,StringType,Range,NamedNumberList,_BinOrOld) ->
+ {StrLen,NewVal} = case StringType of
+ ?N_BIT_STRING when NamedNumberList /= [] ->
+ {no_check,Val};
+ ?N_BIT_STRING when list(Val) ->
+ {length(Val),Val};
+ ?N_BIT_STRING when tuple(Val) ->
+ {(size(element(2,Val))*8) - element(1,Val),Val};
+ _ when binary(Val) ->
+ {size(Val),binary_to_list(Val)};
+ _ when list(Val) ->
+ {length(Val), Val}
+ end,
+ case Range of
+ _ when StrLen == no_check ->
+ NewVal;
+ [] -> % No length constraint
+ NewVal;
+ {Lb,Ub} when StrLen >= Lb, Ub >= StrLen -> % variable length constraint
+ NewVal;
+ {{Lb,_Ub},[]} when StrLen >= Lb ->
+ NewVal;
+ {{Lb1,Ub1},{Lb2,Ub2}} when StrLen >= Lb1, StrLen =< Ub1;
+ StrLen =< Ub2, StrLen >= Lb2 ->
+ NewVal;
+ StrLen -> % fixed length constraint
+ NewVal;
+ {_,_} ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _Len when integer(_Len) ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _ -> % some strange constraint that we don't support yet
+ NewVal
+ end.
+
+
+%%=============================================================================
+%% Common routines for several string types including bit string
+%% handles indefinite length
+%%=============================================================================
+
+
+decode_restricted_string_notag(Buffer, _Range, StringType, TagsIn,
+ _, NamedNumberList, OptOrMand,BinOrOld) ->
+ %%-----------------------------------------------------------
+ %% Get inner (the implicit tag or no tag) and
+ %% outer (the explicit tag) lengths.
+ %%-----------------------------------------------------------
+ {RestTags, {FormLength={_,_Len01}, Buffer0, Rb0}} =
+ check_tags_i(TagsIn, Buffer, OptOrMand),
+
+ case FormLength of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00, RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_restricted_parts(Buffer00, RestBytes, [], StringType,
+ RestTags,
+ Len, NamedNumberList,
+ OptOrMand,
+ BinOrOld, 0, []),
+ {Val01, Buffer01, Rb0+Rb01};
+ {_, Len} ->
+ {Val01, Buffer01, Rb01} =
+ decode_restricted(Buffer0, Len, StringType,
+ NamedNumberList, BinOrOld),
+ {Val01, Buffer01, Rb0+Rb01}
+ end.
+
+
+decode_restricted_parts(Buffer, RestBytes, [], StringType, RestTags, Len, NNList,
+ OptOrMand, BinOrOld, AccRb, AccVal) ->
+ DecodeFun = case RestTags of
+ [] -> fun decode_restricted_string_tag/8;
+ _ -> fun decode_restricted_string_notag/8
+ end,
+ {Val, Buffer1, Rb} =
+ DecodeFun(Buffer, [], StringType, RestTags,
+ no_length, NNList,
+ OptOrMand, BinOrOld),
+ {Buffer2,More} =
+ case Buffer1 of
+ <<0,0,Buffer10/binary>> when Len == indefinite ->
+ {Buffer10,false};
+ <<>> ->
+ {RestBytes,false};
+ _ ->
+ {Buffer1,true}
+ end,
+ {NewVal, NewRb} =
+ case StringType of
+ ?N_BIT_STRING when BinOrOld == bin ->
+ {concat_bit_binaries(AccVal, Val), AccRb+Rb};
+ _ when binary(Val),binary(AccVal) ->
+ {<<AccVal/binary,Val/binary>>,AccRb+Rb};
+ _ when binary(Val), AccVal==[] ->
+ {Val,AccRb+Rb};
+ _ ->
+ {AccVal++Val, AccRb+Rb}
+ end,
+ case More of
+ false ->
+ {NewVal, Buffer2, NewRb};
+ true ->
+ decode_restricted_parts(Buffer2, RestBytes, [], StringType, RestTags, Len, NNList,
+ OptOrMand, BinOrOld, NewRb, NewVal)
+ end.
+
+
+
+decode_restricted(Buffer, InnerLen, StringType, NamedNumberList,BinOrOld) ->
+
+ case StringType of
+ ?N_BIT_STRING ->
+ decode_bit_string2(InnerLen,Buffer,NamedNumberList,InnerLen,BinOrOld);
+
+ ?N_UniversalString ->
+ <<PreBuff:InnerLen/binary,RestBuff/binary>> = Buffer,%%added for binary
+ UniString = mk_universal_string(binary_to_list(PreBuff)),
+ {UniString,RestBuff,InnerLen};
+ ?N_BMPString ->
+ <<PreBuff:InnerLen/binary,RestBuff/binary>> = Buffer,%%added for binary
+ BMP = mk_BMP_string(binary_to_list(PreBuff)),
+ {BMP,RestBuff,InnerLen};
+ _ ->
+ <<PreBuff:InnerLen/binary,RestBuff/binary>> = Buffer,%%added for binary
+ {PreBuff, RestBuff, InnerLen}
+ end.
+
+
+
+%%============================================================================
+%% encode Universal string
+%%============================================================================
+
+encode_universal_string(C, {Name, Universal}, DoTag) when atom(Name) ->
+ encode_universal_string(C, Universal, DoTag);
+encode_universal_string(_C, Universal, []) ->
+ OctetList = mk_uni_list(Universal),
+ dotag_universal(?N_UniversalString,OctetList,length(OctetList));
+encode_universal_string(_C, Universal, DoTag) ->
+ OctetList = mk_uni_list(Universal),
+ dotag(DoTag, ?N_UniversalString, {OctetList,length(OctetList)}).
+
+mk_uni_list(In) ->
+ mk_uni_list(In,[]).
+
+mk_uni_list([],List) ->
+ lists:reverse(List);
+mk_uni_list([{A,B,C,D}|T],List) ->
+ mk_uni_list(T,[D,C,B,A|List]);
+mk_uni_list([H|T],List) ->
+ mk_uni_list(T,[H,0,0,0|List]).
+
+%%===========================================================================
+%% decode Universal strings
+%% (Buffer, Range, StringType, HasTag, LenIn) ->
+%% {String, Remain, RemovedBytes}
+%%===========================================================================
+
+decode_universal_string(Buffer, Range, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag, #tag{class=?UNIVERSAL,number=?N_UniversalString}),
+ decode_restricted_string(Buffer, Range, ?N_UniversalString,
+ Tags, LenIn, [], OptOrMand,old).
+
+
+mk_universal_string(In) ->
+ mk_universal_string(In,[]).
+
+mk_universal_string([],Acc) ->
+ lists:reverse(Acc);
+mk_universal_string([0,0,0,D|T],Acc) ->
+ mk_universal_string(T,[D|Acc]);
+mk_universal_string([A,B,C,D|T],Acc) ->
+ mk_universal_string(T,[{A,B,C,D}|Acc]).
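+
+%% Example (illustrative only): each character is carried in four octets, so
+%%   mk_uni_list("AB")                        -> [0,0,0,65,0,0,0,66]
+%%   mk_universal_string([0,0,0,65,0,0,0,66]) -> "AB"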
+
+
+%%============================================================================
+%% encode BMP string
+%%============================================================================
+
+encode_BMP_string(C, {Name,BMPString}, DoTag) when atom(Name)->
+ encode_BMP_string(C, BMPString, DoTag);
+encode_BMP_string(_C, BMPString, []) ->
+ OctetList = mk_BMP_list(BMPString),
+ dotag_universal(?N_BMPString,OctetList,length(OctetList));
+encode_BMP_string(_C, BMPString, DoTag) ->
+ OctetList = mk_BMP_list(BMPString),
+ dotag(DoTag, ?N_BMPString, {OctetList,length(OctetList)}).
+
+mk_BMP_list(In) ->
+ mk_BMP_list(In,[]).
+
+mk_BMP_list([],List) ->
+ lists:reverse(List);
+mk_BMP_list([{0,0,C,D}|T],List) ->
+ mk_BMP_list(T,[D,C|List]);
+mk_BMP_list([H|T],List) ->
+ mk_BMP_list(T,[H,0|List]).
+
+%%============================================================================
+%% decode (OctetList, Range(ignored), tag|notag) -> {ValList, RestList}
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+decode_BMP_string(Buffer, Range, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag, #tag{class=?UNIVERSAL,number=?N_BMPString}),
+ decode_restricted_string(Buffer, Range, ?N_BMPString,
+ Tags, LenIn, [], OptOrMand,old).
+
+mk_BMP_string(In) ->
+ mk_BMP_string(In,[]).
+
+mk_BMP_string([],US) ->
+ lists:reverse(US);
+mk_BMP_string([0,B|T],US) ->
+ mk_BMP_string(T,[B|US]);
+mk_BMP_string([C,D|T],US) ->
+ mk_BMP_string(T,[{0,0,C,D}|US]).
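+
+%% Example (illustrative only): BMP characters are carried in two octets, so
+%%   mk_BMP_list("AB")          -> [0,65,0,66]
+%%   mk_BMP_string([0,65,0,66]) -> "AB"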
+
+
+%%============================================================================
+%% Generalized time, ITU_T X.680 Chapter 39
+%%
+%% encode Generalized time
+%%============================================================================
+
+encode_generalized_time(C, {Name,OctetList}, DoTag) when atom(Name) ->
+ encode_generalized_time(C, OctetList, DoTag);
+encode_generalized_time(_C, OctetList, []) ->
+ dotag_universal(?N_GeneralizedTime,OctetList,length(OctetList));
+encode_generalized_time(_C, OctetList, DoTag) ->
+ dotag(DoTag, ?N_GeneralizedTime, {OctetList,length(OctetList)}).
+
+%%============================================================================
+%% decode Generalized time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_generalized_time(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,
+ number=?N_GeneralizedTime}),
+ decode_generalized_time_notag(Buffer, Range, NewTags, TotalLen, OptOrMand).
+
+decode_generalized_time_notag(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_generalized_time_notag(Buffer00, Range,
+ RestTags, TotalLen,
+ OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ <<PreBuff:Len/binary,RestBuff/binary>> = Buffer0,
+ {binary_to_list(PreBuff), RestBuff, Rb0+Len}
+ end.
+
+%%============================================================================
+%% Universal time, ITU_T X.680 Chapter 40
+%%
+%% encode UTC time
+%%============================================================================
+
+encode_utc_time(C, {Name,OctetList}, DoTag) when atom(Name) ->
+ encode_utc_time(C, OctetList, DoTag);
+encode_utc_time(_C, OctetList, []) ->
+ dotag_universal(?N_UTCTime, OctetList,length(OctetList));
+encode_utc_time(_C, OctetList, DoTag) ->
+ dotag(DoTag, ?N_UTCTime, {OctetList,length(OctetList)}).
+
+%%============================================================================
+%% decode UTC time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_utc_time(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_UTCTime}),
+ decode_utc_time_notag(Buffer, Range, NewTags, TotalLen, OptOrMand).
+
+decode_utc_time_notag(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_utc_time_notag(Buffer00, Range,
+ RestTags, TotalLen,
+ OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ <<PreBuff:Len/binary,RestBuff/binary>> = Buffer0,
+ {binary_to_list(PreBuff), RestBuff, Rb0+Len}
+ end.
+
+
+%%============================================================================
+%% Length handling
+%%
+%% Encode length
+%%
+%% encode_length(Int | indefinite) ->
+%%	  [Int] (when Int < 128) | [16#80 + NumLenOctets | LenOctets] | [16#80]
+%%============================================================================
+
+encode_length(indefinite) ->
+ {[16#80],1}; % 128
+encode_length(L) when L =< 16#7F ->
+ {[L],1};
+encode_length(L) ->
+ Oct = minimum_octets(L),
+ Len = length(Oct),
+ if
+ Len =< 126 ->
+ {[ (16#80+Len) | Oct ],Len+1};
+ true ->
+ exit({error,{asn1, to_long_length_oct, Len}})
+ end.
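+
+%% Example (illustrative only):
+%%   encode_length(5)          -> {[5],1}
+%%   encode_length(500)        -> {[16#82,1,16#F4],3}   % 500 = 16#01F4
+%%   encode_length(indefinite) -> {[16#80],1}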
+
+
+%% Val must be >= 0
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(0,Acc) ->
+ Acc;
+minimum_octets(Val, Acc) ->
+ minimum_octets((Val bsr 8),[Val band 16#FF | Acc]).
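+
+%% Example (illustrative only): the octets come out big-endian with no
+%% leading zero octets, e.g. minimum_octets(258) -> [1,2] and
+%% minimum_octets(0) -> [].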
+
+
+%%===========================================================================
+%% Decode length
+%%
+%% decode_length(OctetList) -> {{indefinite, RestOctetsL}, NoRemovedBytes} |
+%% {{Length, RestOctetsL}, NoRemovedBytes}
+%%===========================================================================
+
+decode_length(<<1:1,0:7,T/binary>>) ->
+ {{indefinite, T}, 1};
+decode_length(<<0:1,Length:7,T/binary>>) ->
+ {{Length,T},1};
+decode_length(<<1:1,LL:7,T/binary>>) ->
+ <<Length:LL/unit:8,Rest/binary>> = T,
+ {{Length,Rest}, LL+1}.
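+
+%% Example (illustrative only), mirroring encode_length/1 above:
+%%   decode_length(<<5,Rest/binary>>)             -> {{5,Rest},1}
+%%   decode_length(<<16#82,1,16#F4,Rest/binary>>) -> {{500,Rest},3}
+%%   decode_length(<<16#80,Rest/binary>>)         -> {{indefinite,Rest},1}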
+
+%decode_length([128 | T]) ->
+% {{indefinite, T},1};
+%decode_length([H | T]) when H =< 127 ->
+% {{H, T},1};
+%decode_length([H | T]) ->
+% dec_long_length(H band 16#7F, T, 0, 1).
+
+
+%%dec_long_length(0, Buffer, Acc, Len) ->
+%% {{Acc, Buffer},Len};
+%%dec_long_length(Bytes, [H | T], Acc, Len) ->
+%% dec_long_length(Bytes - 1, T, (Acc bsl 8) + H, Len+1).
+
+%%===========================================================================
+%% Decode tag and length
+%%
+%% decode_tag_and_length(Buffer) -> {Tag, Len, RemainingBuffer, RemovedBytes}
+%%
+%%===========================================================================
+
+decode_tag_and_length(Buffer) ->
+ {Tag, Buffer2, RemBytesTag} = decode_tag(Buffer),
+ {{Len, Buffer3}, RemBytesLen} = decode_length(Buffer2),
+ {Tag, Len, Buffer3, RemBytesTag+RemBytesLen}.
+
+
+%%============================================================================
+%% Check if valid tag
+%%
+%% check_if_valid_tag(Tag, List_of_valid_tags, OptOrMand) -> name of the tag
+%%===============================================================================
+
+check_if_valid_tag(<<0,0,_/binary>>,_,_) ->
+ asn1_EOC;
+check_if_valid_tag(<<>>, _, OptOrMand) ->
+ check_if_valid_tag2(false,[],[],OptOrMand);
+check_if_valid_tag(Bytes, ListOfTags, OptOrMand) when binary(Bytes) ->
+ {Tag, _, _} = decode_tag(Bytes),
+ check_if_valid_tag(Tag, ListOfTags, OptOrMand);
+
+%% This alternative should be removed in the near future
+%% Bytes as input should be the only necessary call
+check_if_valid_tag(Tag, ListOfTags, OptOrMand) ->
+ {Class, _Form, TagNo} = Tag,
+ C = code_class(Class),
+ T = case C of
+ 'UNIVERSAL' ->
+ code_type(TagNo);
+ _ ->
+ TagNo
+ end,
+ check_if_valid_tag2({C,T}, ListOfTags, Tag, OptOrMand).
+
+check_if_valid_tag2(_Class_TagNo, [], Tag, mandatory) ->
+ exit({error,{asn1,{invalid_tag,Tag}}});
+check_if_valid_tag2(_Class_TagNo, [], Tag, _) ->
+ exit({error,{asn1,{no_optional_tag,Tag}}});
+
+check_if_valid_tag2(Class_TagNo, [{TagName,TagList}|T], Tag, OptOrMand) ->
+ case check_if_valid_tag_loop(Class_TagNo, TagList) of
+ true ->
+ TagName;
+ false ->
+ check_if_valid_tag2(Class_TagNo, T, Tag, OptOrMand)
+ end.
+
+check_if_valid_tag_loop(_Class_TagNo,[]) ->
+ false;
+check_if_valid_tag_loop(Class_TagNo,[H|T]) ->
+    %% It is not possible to distinguish between SEQUENCE OF and SEQUENCE, or
+    %% between SET OF and SET, because they are coded with tag number 16 and
+    %% 17, respectively.
+ H_without_OF = case H of
+ {C, 'SEQUENCE OF'} ->
+ {C, 'SEQUENCE'};
+ {C, 'SET OF'} ->
+ {C, 'SET'};
+ Else ->
+ Else
+ end,
+
+ case H_without_OF of
+ Class_TagNo ->
+ true;
+ {_,_} ->
+ check_if_valid_tag_loop(Class_TagNo,T);
+ _ ->
+ check_if_valid_tag_loop(Class_TagNo,H),
+ check_if_valid_tag_loop(Class_TagNo,T)
+ end.
+
+
+
+code_class(0) -> 'UNIVERSAL';
+code_class(16#40) -> 'APPLICATION';
+code_class(16#80) -> 'CONTEXT';
+code_class(16#C0) -> 'PRIVATE'.
+
+
+code_type(1) -> 'BOOLEAN';
+code_type(2) -> 'INTEGER';
+code_type(3) -> 'BIT STRING';
+code_type(4) -> 'OCTET STRING';
+code_type(5) -> 'NULL';
+code_type(6) -> 'OBJECT IDENTIFIER';
+code_type(7) -> 'OBJECT DESCRIPTOR';
+code_type(8) -> 'EXTERNAL';
+code_type(9) -> 'REAL';
+code_type(10) -> 'ENUMERATED';
+code_type(11) -> 'EMBEDDED_PDV';
+code_type(16) -> 'SEQUENCE';
+code_type(16) -> 'SEQUENCE OF';
+code_type(17) -> 'SET';
+code_type(17) -> 'SET OF';
+code_type(18) -> 'NumericString';
+code_type(19) -> 'PrintableString';
+code_type(20) -> 'TeletexString';
+code_type(21) -> 'VideotexString';
+code_type(22) -> 'IA5String';
+code_type(23) -> 'UTCTime';
+code_type(24) -> 'GeneralizedTime';
+code_type(25) -> 'GraphicString';
+code_type(26) -> 'VisibleString';
+code_type(27) -> 'GeneralString';
+code_type(28) -> 'UniversalString';
+code_type(30) -> 'BMPString';
+code_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
+
+%%-------------------------------------------------------------------------
+%% decoding of the components of a SET
+%%-------------------------------------------------------------------------
+
+decode_set(Rb, indefinite, <<0,0,Bytes/binary>>, _OptOrMand, _Fun3, Acc) ->
+ {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_set(Rb, indefinite, Bytes, OptOrMand, Fun3, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, OptOrMand),
+ decode_set(Rb+Rb1, indefinite, Remain, OptOrMand, Fun3, [Term|Acc]);
+
+decode_set(Rb, Num, Bytes, _OptOrMand, _Fun3, Acc) when Num == 0 ->
+ {lists:reverse(Acc), Bytes, Rb};
+
+decode_set(_, Num, _, _, _, _) when Num < 0 ->
+ exit({error,{asn1,{length_error,'SET'}}});
+
+decode_set(Rb, Num, Bytes, OptOrMand, Fun3, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, OptOrMand),
+ decode_set(Rb+Rb1, Num-Rb1, Remain, OptOrMand, Fun3, [Term|Acc]).
+
+
+%%-------------------------------------------------------------------------
+%% decoding of SEQUENCE OF and SET OF
+%%-------------------------------------------------------------------------
+
+decode_components(Rb, indefinite, <<0,0,Bytes/binary>>, _Fun3, _TagIn, Acc) ->
+ {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_components(Rb, indefinite, Bytes, Fun3, TagIn, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, mandatory, TagIn),
+ decode_components(Rb+Rb1, indefinite, Remain, Fun3, TagIn, [Term|Acc]);
+
+decode_components(Rb, Num, Bytes, _Fun3, _TagIn, Acc) when Num == 0 ->
+ {lists:reverse(Acc), Bytes, Rb};
+
+decode_components(_, Num, _, _, _, _) when Num < 0 ->
+ exit({error,{asn1,{length_error,'SET/SEQUENCE OF'}}});
+
+decode_components(Rb, Num, Bytes, Fun3, TagIn, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, mandatory, TagIn),
+ decode_components(Rb+Rb1, Num-Rb1, Remain, Fun3, TagIn, [Term|Acc]).
+
+%%decode_components(Rb, indefinite, [0,0|Bytes], _Fun3, _TagIn, Acc) ->
+%% {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_components(Rb, indefinite, <<0,0,Bytes/binary>>, _Fun4, _TagIn, _Fun, Acc) ->
+ {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_components(Rb, indefinite, Bytes, _Fun4, TagIn, _Fun, Acc) ->
+ {Term, Remain, Rb1} = _Fun4(Bytes, mandatory, TagIn, _Fun),
+ decode_components(Rb+Rb1, indefinite, Remain, _Fun4, TagIn, _Fun, [Term|Acc]);
+
+decode_components(Rb, Num, Bytes, _Fun4, _TagIn, _Fun, Acc) when Num == 0 ->
+ {lists:reverse(Acc), Bytes, Rb};
+
+decode_components(_, Num, _, _, _, _, _) when Num < 0 ->
+ exit({error,{asn1,{length_error,'SET/SEQUENCE OF'}}});
+
+decode_components(Rb, Num, Bytes, _Fun4, TagIn, _Fun, Acc) ->
+ {Term, Remain, Rb1} = _Fun4(Bytes, mandatory, TagIn, _Fun),
+ decode_components(Rb+Rb1, Num-Rb1, Remain, _Fun4, TagIn, _Fun, [Term|Acc]).
+
+
+
+%%-------------------------------------------------------------------------
+%% INTERNAL HELPER FUNCTIONS (not exported)
+%%-------------------------------------------------------------------------
+
+
+%%==========================================================================
+%% Encode tag
+%%
+%% dotag(tag | notag, TagValpattern | TagValTuple, [Length, Value]) -> [Tag]
+%% TagValPattern is a correct bitpattern for a tag
+%% TagValTuple is a tuple of three bitpatterns, Class, Form and TagNo where
+%% Class = UNIVERSAL | APPLICATION | CONTEXT | PRIVATE
+%% Form = Primitive | Constructed
+%% TagNo = Number of tag
+%%==========================================================================
+
+
+dotag([], Tag, {Bytes,Len}) ->
+ dotag_universal(Tag,Bytes,Len);
+dotag(Tags, Tag, {Bytes,Len}) ->
+ encode_tags(Tags ++ [#tag{class=?UNIVERSAL,number=Tag,form=?PRIMITIVE}],
+ Bytes, Len);
+
+dotag(Tags, Tag, Bytes) ->
+ encode_tags(Tags ++ [#tag{class=?UNIVERSAL,number=Tag,form=?PRIMITIVE}],
+ Bytes, size(Bytes)).
+
+dotag_universal(UniversalTag,Bytes,Len) when Len =< 16#7F->
+ {[UniversalTag,Len,Bytes],2+Len};
+dotag_universal(UniversalTag,Bytes,Len) ->
+ {EncLen,LenLen}=encode_length(Len),
+ {[UniversalTag,EncLen,Bytes],1+LenLen+Len}.
+
+%% decoding positive integer values.
+decode_integer2(Len,Bin = <<0:1,_:7,_Bs/binary>>,RemovedBytes) ->
+ <<Int:Len/unit:8,Buffer2/binary>> = Bin,
+ {Int,Buffer2,RemovedBytes};
+%% decoding negative integer values.
+decode_integer2(Len,<<1:1,B2:7,Bs/binary>>,RemovedBytes) ->
+ <<N:Len/unit:8,Buffer2/binary>> = <<B2,Bs/binary>>,
+ Int = N - (1 bsl (8 * Len - 1)),
+ {Int,Buffer2,RemovedBytes}.
+
+%%decode_integer2(Len,Buffer,Acc,RemovedBytes) when (hd(Buffer) band 16#FF) =< 16#7F ->
+%% {decode_integer_pos(Buffer, 8 * (Len - 1)),skip(Buffer,Len),RemovedBytes};
+%%decode_integer2(Len,Buffer,Acc,RemovedBytes) ->
+%% {decode_integer_neg(Buffer, 8 * (Len - 1)),skip(Buffer,Len),RemovedBytes}.
+
+%%decode_integer_pos([Byte|Tail], Shift) ->
+%% (Byte bsl Shift) bor decode_integer_pos(Tail, Shift-8);
+%%decode_integer_pos([], _) -> 0.
+
+
+%%decode_integer_neg([Byte|Tail], Shift) ->
+%% (-128 + (Byte band 127) bsl Shift) bor decode_integer_pos(Tail, Shift-8).
+
+
+concat_bit_binaries([],Bin={_,_}) ->
+ Bin;
+concat_bit_binaries({0,B1},{U2,B2}) ->
+ {U2,<<B1/binary,B2/binary>>};
+concat_bit_binaries({U1,B1},{U2,B2}) ->
+ S1 = (size(B1) * 8) - U1,
+ S2 = (size(B2) * 8) - U2,
+ PadBits = 8 - ((S1+S2) rem 8),
+ {PadBits, <<B1:S1/binary-unit:1,B2:S2/binary-unit:1,0:PadBits>>};
+concat_bit_binaries(L1,L2) when list(L1),list(L2) ->
+    %% this case occurs when decoding with NNL
+ L1 ++ L2.
+
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%skip(Buffer, 0) ->
+%% Buffer;
+%%skip([H | T], Len) ->
+%% skip(T, Len-1).
+
+new_tags([],LastTag) ->
+ [LastTag];
+new_tags(Tags=[#tag{type='IMPLICIT'}],_LastTag) ->
+ Tags;
+new_tags([T1 = #tag{type='IMPLICIT'},#tag{type=T2Type}|Rest],LastTag) ->
+ new_tags([T1#tag{type=T2Type}|Rest],LastTag);
+new_tags(Tags,LastTag) ->
+ case lists:last(Tags) of
+ #tag{type='IMPLICIT'} ->
+ Tags;
+ _ ->
+ Tags ++ [LastTag]
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
new file mode 100644
index 0000000000..7f7846184a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
@@ -0,0 +1,1869 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_ber_bin_v2.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt_ber_bin_v2).
+
+%% encoding / decoding of BER
+
+-export([decode/1, decode/2, match_tags/2, encode/1]).
+-export([fixoptionals/2, cindex/3,
+ list_to_record/2,
+ encode_tag_val/1,
+ encode_tags/3]).
+-export([encode_boolean/2,decode_boolean/2,
+ encode_integer/3,encode_integer/4,
+ decode_integer/3, decode_integer/4,
+ encode_enumerated/2,
+ encode_enumerated/4,decode_enumerated/4,
+ encode_real/2,decode_real/3,
+ encode_bit_string/4,decode_bit_string/4,
+ decode_compact_bit_string/4,
+ encode_octet_string/3,decode_octet_string/3,
+ encode_null/2,decode_null/2,
+ encode_object_identifier/2,decode_object_identifier/2,
+ encode_restricted_string/4,decode_restricted_string/4,
+ encode_universal_string/3,decode_universal_string/3,
+ encode_BMP_string/3,decode_BMP_string/3,
+ encode_generalized_time/3,decode_generalized_time/3,
+ encode_utc_time/3,decode_utc_time/3,
+ encode_length/1,decode_length/1,
+ decode_tag_and_length/1]).
+
+-export([encode_open_type/1,encode_open_type/2,
+ decode_open_type/2,decode_open_type_as_binary/2]).
+
+-export([decode_primitive_incomplete/2]).
+
+-include("asn1_records.hrl").
+
+% the encoding of class of tag bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+%%% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+%%% The tag-number for universal types
+-define(N_BOOLEAN, 1).
+-define(N_INTEGER, 2).
+-define(N_BIT_STRING, 3).
+-define(N_OCTET_STRING, 4).
+-define(N_NULL, 5).
+-define(N_OBJECT_IDENTIFIER, 6).
+-define(N_OBJECT_DESCRIPTOR, 7).
+-define(N_EXTERNAL, 8).
+-define(N_REAL, 9).
+-define(N_ENUMERATED, 10).
+-define(N_EMBEDDED_PDV, 11).
+-define(N_SEQUENCE, 16).
+-define(N_SET, 17).
+-define(N_NumericString, 18).
+-define(N_PrintableString, 19).
+-define(N_TeletexString, 20).
+-define(N_VideotexString, 21).
+-define(N_IA5String, 22).
+-define(N_UTCTime, 23).
+-define(N_GeneralizedTime, 24).
+-define(N_GraphicString, 25).
+-define(N_VisibleString, 26).
+-define(N_GeneralString, 27).
+-define(N_UniversalString, 28).
+-define(N_BMPString, 30).
+
+
+% the complete tag-word of built-in types
+-define(T_BOOLEAN, ?UNIVERSAL bor ?PRIMITIVE bor 1).
+-define(T_INTEGER, ?UNIVERSAL bor ?PRIMITIVE bor 2).
+-define(T_BIT_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 3). % can be CONSTRUCTED
+-define(T_OCTET_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 4). % can be CONSTRUCTED
+-define(T_NULL, ?UNIVERSAL bor ?PRIMITIVE bor 5).
+-define(T_OBJECT_IDENTIFIER,?UNIVERSAL bor ?PRIMITIVE bor 6).
+-define(T_OBJECT_DESCRIPTOR,?UNIVERSAL bor ?PRIMITIVE bor 7).
+-define(T_EXTERNAL, ?UNIVERSAL bor ?PRIMITIVE bor 8).
+-define(T_REAL, ?UNIVERSAL bor ?PRIMITIVE bor 9).
+-define(T_ENUMERATED, ?UNIVERSAL bor ?PRIMITIVE bor 10).
+-define(T_EMBEDDED_PDV, ?UNIVERSAL bor ?PRIMITIVE bor 11).
+-define(T_SEQUENCE, ?UNIVERSAL bor ?CONSTRUCTED bor 16).
+-define(T_SET, ?UNIVERSAL bor ?CONSTRUCTED bor 17).
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_UTCTime, ?UNIVERSAL bor ?PRIMITIVE bor 23).
+-define(T_GeneralizedTime, ?UNIVERSAL bor ?PRIMITIVE bor 24).
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+-define(T_UniversalString, ?UNIVERSAL bor ?PRIMITIVE bor 28). %can be constructed
+-define(T_BMPString, ?UNIVERSAL bor ?PRIMITIVE bor 30). %can be constructed
+
+% encode(Tlv={_Tag={?PRIMITIVE,_},_VList}) ->
+% encode_primitive(Tlv);
+% encode(Tlv) ->
+% encode_constructed(Tlv).
+
+encode([Tlv]) ->
+ encode(Tlv);
+encode({TlvTag,TlvVal}) when list(TlvVal) ->
+ %% constructed form of value
+ encode_tlv(TlvTag,TlvVal,?CONSTRUCTED);
+encode({TlvTag,TlvVal}) ->
+ encode_tlv(TlvTag,TlvVal,?PRIMITIVE);
+encode(Bin) when binary(Bin) ->
+ Bin.
+
+encode_tlv(TlvTag,TlvVal,Form) ->
+ Tag = encode_tlv_tag(TlvTag,Form),
+ {Val,VLen} = encode_tlv_val(TlvVal),
+ {Len,_LLen} = encode_length(VLen),
+ BinLen = list_to_binary(Len),
+ <<Tag/binary,BinLen/binary,Val/binary>>.
+
+encode_tlv_tag(ClassTagNo,Form) ->
+ Class = ClassTagNo bsr 16,
+ case encode_tag_val({Class bsl 6,Form,(ClassTagNo - (Class bsl 16))}) of
+ T when list(T) ->
+ list_to_binary(T);
+ T ->
+ T
+ end.
+
+encode_tlv_val(TlvL) when list(TlvL) ->
+ encode_tlv_list(TlvL,[]);
+encode_tlv_val(Bin) ->
+ {Bin,size(Bin)}.
+
+encode_tlv_list([Tlv|Tlvs],Acc) ->
+ EncTlv = encode(Tlv),
+ encode_tlv_list(Tlvs,[EncTlv|Acc]);
+encode_tlv_list([],Acc) ->
+ Bin=list_to_binary(lists:reverse(Acc)),
+ {Bin,size(Bin)}.
+
+% encode_primitive({{_,ClassTagNo},V}) ->
+% Len = size(V), % not sufficient as length encode
+% Class = ClassTagNo bsr 16,
+% {TagLen,Tag} =
+% case encode_tag_val({Class,?PRIMITIVE,ClassTagNo - Class}) of
+% T when list(T) ->
+% {length(T),list_to_binary(T)};
+% T ->
+% {1,T}
+% end,
+
+
+decode(B,driver) ->
+ case catch port_control(drv_complete,2,B) of
+ Bin when binary(Bin) ->
+ binary_to_term(Bin);
+ List when list(List) -> handle_error(List,B);
+ {'EXIT',{badarg,Reason}} ->
+ asn1rt_driver_handler:load_driver(),
+ receive
+ driver_ready ->
+ case catch port_control(drv_complete,2,B) of
+ Bin2 when binary(Bin2) -> binary_to_term(Bin2);
+ List when list(List) -> handle_error(List,B);
+ Error -> exit(Error)
+ end;
+ {error,Error} -> % error when loading driver
+ %% the driver could not be loaded
+ exit(Error);
+ Error={port_error,Reason} ->
+ exit(Error)
+ end;
+ {'EXIT',Reason} ->
+ exit(Reason)
+ end.
+
+handle_error([],_)->
+ exit({error,{"memory allocation problem"}});
+handle_error([$1|_],L) -> % error in driver
+ exit({error,{asn1_error,L}});
+handle_error([$2|_],L) -> % error in driver due to wrong tag
+ exit({error,{asn1_error,{"bad tag",L}}});
+handle_error([$3|_],L) -> % error in driver due to length error
+ exit({error,{asn1_error,{"bad length field",L}}});
+handle_error([$4|_],L) -> % error in driver due to indefinite length error
+ exit({error,{asn1_error,{"indefinite length without end bytes",L}}});
+handle_error(ErrL,L) ->
+ exit({error,{unknown_error,ErrL,L}}).
+
+
+decode(Bin) when binary(Bin) ->
+ decode_primitive(Bin);
+decode(Tlv) -> % assume it is a tlv
+ {Tlv,<<>>}.
+
+
+decode_primitive(Bin) ->
+ {{Form,TagNo,Len,V},Rest} = decode_tlv(Bin),
+ case Form of
+ 1 when Len == indefinite -> % constructed
+ {Vlist,Rest2} = decode_constructed_indefinite(V,[]),
+ {{TagNo,Vlist},Rest2};
+ 1 -> % constructed
+ {{TagNo,decode_constructed(V)},Rest};
+ 0 -> % primitive
+ {{TagNo,V},Rest}
+ end.
+
+decode_constructed(<<>>) ->
+ [];
+decode_constructed(Bin) ->
+ {Tlv,Rest} = decode_primitive(Bin),
+ [Tlv|decode_constructed(Rest)].
+
+decode_constructed_indefinite(<<0,0,Rest/binary>>,Acc) ->
+ {lists:reverse(Acc),Rest};
+decode_constructed_indefinite(Bin,Acc) ->
+ {Tlv,Rest} = decode_primitive(Bin),
+ decode_constructed_indefinite(Rest, [Tlv|Acc]).
+
+decode_tlv(Bin) ->
+ {Form,TagNo,Len,Bin2} = decode_tag_and_length(Bin),
+ case Len of
+ indefinite ->
+ {{Form,TagNo,Len,Bin2},[]};
+ _ ->
+ <<V:Len/binary,Bin3/binary>> = Bin2,
+ {{Form,TagNo,Len,V},Bin3}
+ end.
+
+%% decode_primitive_incomplete/2 decodes an encoded message incompletely,
+%% guided by the pattern attribute (first argument).
+decode_primitive_incomplete([[default,TagNo]],Bin) -> %default
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,[],Rest);
+ _ ->
+ %{asn1_DEFAULT,Bin}
+ asn1_NOVALUE
+ end;
+decode_primitive_incomplete([[default,TagNo,Directives]],Bin) -> %default, constructed type, Directives points into this type
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ %{asn1_DEFAULT,Bin}
+ asn1_NOVALUE
+ end;
+decode_primitive_incomplete([[opt,TagNo]],Bin) -> %optional
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,[],Rest);
+ _ ->
+ %{{TagNo,asn1_NOVALUE},Bin}
+ asn1_NOVALUE
+ end;
+decode_primitive_incomplete([[opt,TagNo,Directives]],Bin) -> %optional
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ %{{TagNo,asn1_NOVALUE},Bin}
+ asn1_NOVALUE
+ end;
+%% A choice alternative that shall be undecoded
+decode_primitive_incomplete([[alt_undec,TagNo]|RestAlts],Bin) ->
+% decode_incomplete_bin(Bin);
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,_V},_R} ->
+ decode_incomplete_bin(Bin);
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[alt,TagNo]|RestAlts],Bin) ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ {{TagNo,V},Rest};
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[alt,TagNo,Directives]|RestAlts],Bin) ->
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[alt_parts,TagNo]|RestAlts],Bin) ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ {{TagNo,decode_parts_incomplete(V)},Rest};
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[undec,_TagNo]|_RestTag],Bin) -> %incomplete decode
+ decode_incomplete_bin(Bin); %% use this if changing handling of
+decode_primitive_incomplete([[parts,TagNo]|_RestTag],Bin) ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ {{TagNo,decode_parts_incomplete(V)},Rest};
+ Err ->
+ {error,{asn1,"tag failure",TagNo,Err}}
+ end;
+decode_primitive_incomplete([mandatory|RestTag],Bin) ->
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,RestTag,Rest);
+ _ ->
+ {error,{asn1,"partial incomplete decode failure"}}
+ end;
+%% A choice that is a toptype or a mandatory component of a
+%% SEQUENCE or SET.
+decode_primitive_incomplete([[mandatory,Directives]],Bin) ->
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ {error,{asn1,"partial incomplete decode failure"}}
+ end;
+decode_primitive_incomplete([],Bin) ->
+ decode_primitive(Bin).
+
+%% decode_parts_incomplete/1 receives a number of values encoded in
+%% sequence and returns the parts as undecoded binaries
+decode_parts_incomplete(<<>>) ->
+ [];
+decode_parts_incomplete(Bin) ->
+ {ok,Rest} = skip_tag(Bin),
+ {ok,Rest2} = skip_length_and_value(Rest),
+ LenPart = size(Bin) - size(Rest2),
+ <<Part:LenPart/binary,RestBin/binary>> = Bin,
+ [Part|decode_parts_incomplete(RestBin)].
+
+
+%% decode_incomplete2 checks if V is a value of a constructed or
+%% primitive type, and continues the decode properly.
+decode_incomplete2(1,TagNo,indefinite,V,TagMatch,_) ->
+ %% constructed indefinite length
+ {Vlist,Rest2} = decode_constr_indef_incomplete(TagMatch,V,[]),
+ {{TagNo,Vlist},Rest2};
+decode_incomplete2(1,TagNo,_Len,V,TagMatch,Rest) ->
+ {{TagNo,decode_constructed_incomplete(TagMatch,V)},Rest};
+decode_incomplete2(0,TagNo,_Len,V,_TagMatch,Rest) ->
+ {{TagNo,V},Rest}.
+
+decode_constructed_incomplete(_TagMatch,<<>>) ->
+ [];
+decode_constructed_incomplete([mandatory|RestTag],Bin) ->
+ {Tlv,Rest} = decode_primitive(Bin),
+ [Tlv|decode_constructed_incomplete(RestTag,Rest)];
+decode_constructed_incomplete(Directives=[[Alt,_]|_],Bin)
+ when Alt == alt_undec; Alt == alt ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ case incomplete_choice_alt(TagNo,Directives) of
+ alt_undec ->
+ LenA = size(Bin)-size(Rest),
+ <<A:LenA/binary,Rest/binary>> = Bin,
+ A;
+% {UndecBin,_}=decode_incomplete_bin(Bin),
+% UndecBin;
+% [{TagNo,V}];
+ alt ->
+ {Tlv,_} = decode_primitive(V),
+ [{TagNo,Tlv}];
+ alt_parts ->
+ %{{TagNo,decode_parts_incomplete(V)},Rest}; % maybe wrong
+ [{TagNo,decode_parts_incomplete(V)}];
+ Err ->
+ {error,{asn1,"partial incomplete decode failure",Err}}
+ end;
+ _ ->
+ {error,{asn1,"partial incomplete decode failure"}}
+ end;
+decode_constructed_incomplete([TagNo|RestTag],Bin) ->
+%% {Tlv,Rest} = decode_primitive_incomplete([TagNo],Bin),
+ case decode_primitive_incomplete([TagNo],Bin) of
+ {Tlv,Rest} ->
+ [Tlv|decode_constructed_incomplete(RestTag,Rest)];
+ asn1_NOVALUE ->
+ decode_constructed_incomplete(RestTag,Bin)
+ end;
+decode_constructed_incomplete([],Bin) ->
+ {Tlv,_Rest}=decode_primitive(Bin),
+ [Tlv].
+
+decode_constr_indef_incomplete(_TagMatch,<<0,0,Rest/binary>>,Acc) ->
+ {lists:reverse(Acc),Rest};
+decode_constr_indef_incomplete([Tag|RestTags],Bin,Acc) ->
+% {Tlv,Rest} = decode_primitive_incomplete([Tag],Bin),
+ case decode_primitive_incomplete([Tag],Bin) of
+ {Tlv,Rest} ->
+ decode_constr_indef_incomplete(RestTags,Rest,[Tlv|Acc]);
+ asn1_NOVALUE ->
+ decode_constr_indef_incomplete(RestTags,Bin,Acc)
+ end.
+
+
+decode_incomplete_bin(Bin) ->
+ {ok,Rest} = skip_tag(Bin),
+ {ok,Rest2} = skip_length_and_value(Rest),
+ IncLen = size(Bin) - size(Rest2),
+ <<IncBin:IncLen/binary,Ret/binary>> = Bin,
+ {IncBin,Ret}.
+
+incomplete_choice_alt(TagNo,[[Alt,TagNo]|_Directives]) ->
+ Alt;
+incomplete_choice_alt(TagNo,[_H|Directives]) ->
+ incomplete_choice_alt(TagNo,Directives);
+incomplete_choice_alt(_,[]) ->
+ error.
+
+
+%% skip_tag and skip_length_and_value are routines used both by
+%% decode_partial_incomplete and decode_partial (decode/2).
+
+skip_tag(<<_:3,31:5,Rest/binary>>)->
+ skip_long_tag(Rest);
+skip_tag(<<_:3,_Tag:5,Rest/binary>>) ->
+ {ok,Rest}.
+
+skip_long_tag(<<1:1,_:7,Rest/binary>>) ->
+ skip_long_tag(Rest);
+skip_long_tag(<<0:1,_:7,Rest/binary>>) ->
+ {ok,Rest}.
+
+skip_length_and_value(Binary) ->
+ case decode_length(Binary) of
+ {indefinite,RestBinary} ->
+ skip_indefinite_value(RestBinary);
+ {Length,RestBinary} ->
+ <<_:Length/unit:8,Rest/binary>> = RestBinary,
+ {ok,Rest}
+ end.
+
+skip_indefinite_value(<<0,0,Rest/binary>>) ->
+ {ok,Rest};
+skip_indefinite_value(Binary) ->
+ {ok,RestBinary}=skip_tag(Binary),
+ {ok,RestBinary2} = skip_length_and_value(RestBinary),
+ skip_indefinite_value(RestBinary2).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% match_tags takes a Tlv (Tag, Length, Value) structure and matches
+%% it against the tags in TagList. If the tags do not match, the call
+%% fails with a wrong_tag error; otherwise it returns the remaining Tlv
+%% after the matched tags have been removed.
+%%
+%% match_tags(Tlv, TagList)
+%%
+
+
+match_tags({T,V}, [T|Tt]) ->
+ match_tags(V,Tt);
+match_tags([{T,V}],[T|Tt]) ->
+ match_tags(V, Tt);
+match_tags(Vlist = [{T,_V}|_], [T]) ->
+ Vlist;
+match_tags(Tlv, []) ->
+ Tlv;
+match_tags({Tag,_V},[T|_Tt]) ->
+ {error,{asn1,{wrong_tag,{Tag,T}}}}.
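+%% Illustrative sketch (not part of the original code), assuming a single
+%% pre-decoded Tlv and a one-element tag list:
+%%   match_tags({16#10,<<5>>}, [16#10]) -> <<5>>
+%%   match_tags({16#10,<<5>>}, [16#11]) -> {error,{asn1,{wrong_tag,{16#10,16#11}}}}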
+
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Optionals: preset optionals that were not given a value to asn1_NOVALUE
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,_Acc1,Acc2) ->
+ % return Val as a record
+ list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]).
+
+
+%%encode_tag(TagClass(?UNI, APP etc), Form (?PRIM etc), TagInteger) ->
+%% 8bit Int | binary
+encode_tag_val({Class, Form, TagNo}) when (TagNo =< 30) ->
+ <<(Class bsr 6):2,(Form bsr 5):1,TagNo:5>>;
+
+encode_tag_val({Class, Form, TagNo}) ->
+ {Octets,_Len} = mk_object_val(TagNo),
+ BinOct = list_to_binary(Octets),
+ <<(Class bsr 6):2, (Form bsr 5):1, 31:5,BinOct/binary>>;
+
+%% assumes a whole correct tag bit pattern, a multiple of 8 bits
+encode_tag_val(Tag) when (Tag =< 255) -> Tag; %% is this function ever used??!!
+%% assumes a correct bit pattern of 0-5
+encode_tag_val(Tag) -> encode_tag_val2(Tag,[]).
+
+encode_tag_val2(Tag, OctAck) when (Tag =< 255) ->
+ [Tag | OctAck];
+encode_tag_val2(Tag, OctAck) ->
+ encode_tag_val2(Tag bsr 8, [255 band Tag | OctAck]).
+
+
+%%===============================================================================
+%% Decode a tag
+%%
+%% decode_tag(OctetListBuffer) -> {{Form, (Class bsl 16)+ TagNo}, RestOfBuffer, RemovedBytes}
+%%===============================================================================
+
+decode_tag_and_length(<<Class:2, Form:1, TagNo:5, 0:1, Length:7, RestBuffer/binary>>) when TagNo < 31 ->
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, TagNo:5, 1:1, 0:7, T/binary>>) when TagNo < 31 ->
+ {Form, (Class bsl 16) + TagNo, indefinite, T};
+decode_tag_and_length(<<Class:2, Form:1, TagNo:5, 1:1, LL:7, T/binary>>) when TagNo < 31 ->
+ <<Length:LL/unit:8,RestBuffer/binary>> = T,
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, 0:1, TagNo:7, 0:1, Length:7, RestBuffer/binary>>) ->
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, 0:1, TagNo:7, 1:1, 0:7, T/binary>>) ->
+ {Form, (Class bsl 16) + TagNo, indefinite, T};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, 0:1, TagNo:7, 1:1, LL:7, T/binary>>) ->
+ <<Length:LL/unit:8,RestBuffer/binary>> = T,
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, Buffer/binary>>) ->
+ {TagNo, Buffer1} = decode_tag(Buffer, 0),
+ {Length, RestBuffer} = decode_length(Buffer1),
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer}.
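+%% Illustrative sketch (not part of the original code), assuming short
+%% definite-length inputs:
+%%   decode_tag_and_length(<<2,1,5>>)         -> {0,2,1,<<5>>}
+%%   decode_tag_and_length(<<16#30,3,2,1,5>>) -> {1,16#10,3,<<2,1,5>>}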
+
+
+
+%% last partial tag
+decode_tag(<<0:1,PartialTag:7, Buffer/binary>>, TagAck) ->
+ TagNo = (TagAck bsl 7) bor PartialTag,
+ %%<<TagNo>> = <<TagAck:1, PartialTag:7>>,
+ {TagNo, Buffer};
+% more tags
+decode_tag(<<_:1,PartialTag:7, Buffer/binary>>, TagAck) ->
+ TagAck1 = (TagAck bsl 7) bor PartialTag,
+ %%<<TagAck1:16>> = <<TagAck:1, PartialTag:7,0:8>>,
+ decode_tag(Buffer, TagAck1).
+
+
+%%=======================================================================
+%%
+%% Encode all tags in the list Tags and return a possibly deep list of
+%% bytes with tag and length encoded
+%% The taglist must be in reverse order (fixed by the asn1 compiler)
+%% e.g [T1,T2] will result in
+%% {[EncodedT2,EncodedT1|BytesSoFar],LenSoFar+LenT2+LenT1}
+%%
+
+encode_tags([Tag|Trest], BytesSoFar, LenSoFar) ->
+% remove {Bytes1,L1} = encode_one_tag(Tag),
+ {Bytes2,L2} = encode_length(LenSoFar),
+ encode_tags(Trest, [Tag,Bytes2|BytesSoFar],
+ LenSoFar + size(Tag) + L2);
+encode_tags([], BytesSoFar, LenSoFar) ->
+ {BytesSoFar,LenSoFar}.
+
+encode_tags(TagIn, {BytesSoFar,LenSoFar}) ->
+ encode_tags(TagIn, BytesSoFar, LenSoFar).
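+%% Illustrative sketch (not part of the original code), assuming the tag is
+%% already encoded as <<2>> (universal INTEGER) and the value bytes are [5]:
+%%   encode_tags([<<2>>], [5], 1) -> {[<<2>>,[1],5],3}
+%% which flattens to the BER encoding <<2,1,5>>.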
+
+% encode_one_tag(#tag{class=Class,number=No,type=Type, form = Form}) ->
+% NewForm = case Type of
+% 'EXPLICIT' ->
+% ?CONSTRUCTED;
+% _ ->
+% Form
+% end,
+% Bytes = encode_tag_val({Class,NewForm,No}),
+% {Bytes,size(Bytes)}.
+
+
+%%===============================================================================
+%%
+%% This comment is valid for all the encode/decode functions
+%%
+%% C = Constraint -> typically {'ValueRange',LowerBound,UpperBound}
+%% used for PER-coding but not for BER-coding.
+%%
+%% Val = Value. If Val is an atom then it is a symbolic integer value
+%% (i.e the atom must be one of the names in the NamedNumberList).
+%% The NamedNumberList is used to translate the atom to an integer value
+%% before encoding.
+%%
+%%===============================================================================
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Value) -> io_list (i.e nested list with integers, binaries)
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+
+%%
+encode_open_type(Val) when list(Val) ->
+% {Val,length(Val)};
+ encode_open_type(list_to_binary(Val));
+encode_open_type(Val) ->
+ {Val, size(Val)}.
+
+%%
+encode_open_type(Val, T) when list(Val) ->
+ encode_open_type(list_to_binary(Val),T);
+encode_open_type(Val,[]) ->
+ {Val, size(Val)};
+encode_open_type(Val,Tag) ->
+ encode_tags(Tag,Val, size(Val)).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Tlv, TagIn) -> Value
+%% Tlv = {Tag,V} | V where V -> binary()
+%% TagIn = [TagVal] where TagVal -> int()
+%% Value = binary with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Tlv, TagIn) ->
+ case match_tags(Tlv,TagIn) of
+ Bin when binary(Bin) ->
+ {InnerTlv,_} = decode(Bin),
+ InnerTlv;
+ TlvBytes -> TlvBytes
+ end.
+
+
+decode_open_type_as_binary(Tlv,TagIn)->
+ case match_tags(Tlv,TagIn) of
+ V when binary(V) ->
+ V;
+ [Tlv2] -> encode(Tlv2);
+ Tlv2 -> encode(Tlv2)
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Boolean, ITU_T X.690 Chapter 8.2
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode_boolean(Integer, ReversedTagList) -> {[Octet],Len}
+%%===============================================================================
+
+encode_boolean({Name, Val}, TagIn) when atom(Name) ->
+ encode_boolean(Val, TagIn);
+encode_boolean(true, TagIn) ->
+ encode_tags(TagIn, [16#FF],1);
+encode_boolean(false, TagIn) ->
+ encode_tags(TagIn, [0],1);
+encode_boolean(X,_) ->
+ exit({error,{asn1, {encode_boolean, X}}}).
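+%% Illustrative sketch (not part of the original code), assuming TagIn holds
+%% the pre-encoded universal BOOLEAN tag [<<1>>]:
+%%   encode_boolean(true, [<<1>>])  -> {[<<1>>,[1],255],3}  %% i.e. <<1,1,255>>
+%%   encode_boolean(false, [<<1>>]) -> {[<<1>>,[1],0],3}    %% i.e. <<1,1,0>>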
+
+
+%%===============================================================================
+%% decode_boolean(BuffList, HasTag, TotalLen) -> {true, Remain, RemovedBytes} |
+%% {false, Remain, RemovedBytes}
+%%===============================================================================
+decode_boolean(Tlv,TagIn) ->
+ Val = match_tags(Tlv, TagIn),
+ case Val of
+ <<0:8>> ->
+ false;
+ <<_:8>> ->
+ true;
+ _ ->
+ exit({error,{asn1, {decode_boolean, Val}}})
+ end.
+
+
+%%===========================================================================
+%% Integer, ITU_T X.690 Chapter 8.3
+
+%% encode_integer(Constraint, Value, Tag) -> [octet list]
+%% encode_integer(Constraint, Name, NamedNumberList, Tag) -> [octet list]
+%% Value = INTEGER | {Name,INTEGER}
+%% Tag = tag | notag
+%%===========================================================================
+
+encode_integer(C, Val, Tag) when integer(Val) ->
+ encode_tags(Tag, encode_integer(C, Val));
+encode_integer(C,{Name,Val},Tag) when atom(Name) ->
+ encode_integer(C,Val,Tag);
+encode_integer(_C, Val, _Tag) ->
+ exit({error,{asn1, {encode_integer, Val}}}).
+
+
+
+encode_integer(C, Val, NamedNumberList, Tag) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value,{_, NewVal}} ->
+ encode_tags(Tag, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {encode_integer_namednumber, Val}}})
+ end;
+encode_integer(C,{_Name,Val},NamedNumberList,Tag) ->
+ encode_integer(C,Val,NamedNumberList,Tag);
+encode_integer(C, Val, _NamedNumberList, Tag) ->
+ encode_tags(Tag, encode_integer(C, Val)).
+
+
+encode_integer(_, Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, L=[B|_Acc]) when B < 128 ->
+ L;
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, L=[B1|_T]) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
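+%% Illustrative sketch (not part of the original code), the untagged two's
+%% complement content octets produced by encode_integer/2:
+%%   encode_integer([], 5)   -> {[5],1}
+%%   encode_integer([], 128) -> {[0,128],2}   %% leading 0 keeps it positive
+%%   encode_integer([], -1)  -> {[255],1}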
+
+%%===============================================================================
+%% decode integer
+%% (Buffer, Range, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%===============================================================================
+
+decode_integer(Tlv,Range,NamedNumberList,TagIn) ->
+ V = match_tags(Tlv,TagIn),
+ Int = decode_integer(V),
+ range_check_integer(Int,Range),
+ number2name(Int,NamedNumberList).
+
+decode_integer(Tlv,Range,TagIn) ->
+ V = match_tags(Tlv, TagIn),
+ Int = decode_integer(V),
+ range_check_integer(Int,Range),
+ Int.
+
+%% decoding positive integer values.
+decode_integer(Bin = <<0:1,_:7,_/binary>>) ->
+ Len = size(Bin),
+% <<Int:Len/unit:8,Buffer2/binary>> = Bin,
+ <<Int:Len/unit:8>> = Bin,
+ Int;
+%% decoding negative integer values.
+decode_integer(Bin = <<1:1,B2:7,Bs/binary>>) ->
+ Len = size(Bin),
+% <<N:Len/unit:8,Buffer2/binary>> = <<B2,Bs/binary>>,
+ <<N:Len/unit:8>> = <<B2,Bs/binary>>,
+ Int = N - (1 bsl (8 * Len - 1)),
+ Int.
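+%% Illustrative sketch (not part of the original code):
+%%   decode_integer(<<5>>)     -> 5
+%%   decode_integer(<<0,128>>) -> 128
+%%   decode_integer(<<255>>)   -> -1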
+
+range_check_integer(Int,Range) ->
+ case Range of
+ [] -> % No length constraint
+ Int;
+ {Lb,Ub} when Int >= Lb, Ub >= Int -> % variable length constraint
+ Int;
+ Int -> % fixed value constraint
+ Int;
+ {_,_} ->
+ exit({error,{asn1,{integer_range,Range,Int}}});
+ SingleValue when integer(SingleValue) ->
+ exit({error,{asn1,{integer_range,Range,Int}}});
+ _ -> % some strange constraint that we don't support yet
+ Int
+ end.
+
+number2name(Int,[]) ->
+ Int;
+number2name(Int,NamedNumberList) ->
+ case lists:keysearch(Int, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ Int
+ end.
+
+
+%%============================================================================
+%% Enumerated value, ITU_T X.690 Chapter 8.4
+
+%% encode enumerated value
+%%============================================================================
+encode_enumerated(Val, TagIn) when integer(Val)->
+ encode_tags(TagIn, encode_integer(false,Val));
+encode_enumerated({Name,Val}, TagIn) when atom(Name) ->
+ encode_enumerated(Val, TagIn).
+
+%% The encode_enumerated functions below this line can be removed when the
+%% new code generation is stable. (the functions might have to be kept here
+%% a while longer for compatibility reasons)
+
+encode_enumerated(C, Val, {NamedNumberList,ExtList}, TagIn) when atom(Val) ->
+ case catch encode_enumerated(C, Val, NamedNumberList, TagIn) of
+ {'EXIT',_} -> encode_enumerated(C, Val, ExtList, TagIn);
+ Result -> Result
+ end;
+
+encode_enumerated(C, Val, NamedNumberList, TagIn) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value, {_, NewVal}} ->
+ encode_tags(TagIn, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {enumerated_not_in_range, Val}}})
+ end;
+
+encode_enumerated(C, {asn1_enum, Val}, {_,_}, TagIn) when integer(Val) ->
+ encode_tags(TagIn, encode_integer(C,Val));
+
+encode_enumerated(C, {Name,Val}, NamedNumberList, TagIn) when atom(Name) ->
+ encode_enumerated(C, Val, NamedNumberList, TagIn);
+
+encode_enumerated(_C, Val, _NamedNumberList, _TagIn) ->
+ exit({error,{asn1, {enumerated_not_namednumber, Val}}}).
+
+
+
+%%============================================================================
+%% decode enumerated value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> Value
+%%===========================================================================
+decode_enumerated(Tlv, Range, NamedNumberList, Tags) ->
+ Buffer = match_tags(Tlv,Tags),
+ decode_enumerated_notag(Buffer, Range, NamedNumberList, Tags).
+
+decode_enumerated_notag(Buffer, _Range, {NamedNumberList,ExtList}, _Tags) ->
+
+ IVal = decode_integer2(size(Buffer), Buffer),
+ case decode_enumerated1(IVal, NamedNumberList) of
+ {asn1_enum,IVal} ->
+ decode_enumerated1(IVal,ExtList);
+ EVal ->
+ EVal
+ end;
+decode_enumerated_notag(Buffer, _Range, NNList, _Tags) ->
+ IVal = decode_integer2(size(Buffer), Buffer),
+ case decode_enumerated1(IVal, NNList) of
+ {asn1_enum,_} ->
+ exit({error,{asn1, {illegal_enumerated, IVal}}});
+ EVal ->
+ EVal
+ end.
+
+decode_enumerated1(Val, NamedNumberList) ->
+ %% it must be a named integer
+ case lists:keysearch(Val, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ {asn1_enum,Val}
+ end.
+
+
+%%============================================================================
+%%
+%% Real value, ITU_T X.690 Chapter 8.5
+%%============================================================================
+%%
+%% encode real value
+%%============================================================================
+
+%% only base 2 internally so far!!
+encode_real(0, TagIn) ->
+ encode_tags(TagIn, {[],0});
+encode_real('PLUS-INFINITY', TagIn) ->
+ encode_tags(TagIn, {[64],1});
+encode_real('MINUS-INFINITY', TagIn) ->
+ encode_tags(TagIn, {[65],1});
+encode_real(Val, TagIn) when tuple(Val)->
+ encode_tags(TagIn, encode_real(Val)).
+
+%%%%%%%%%%%%%%
+%% not optimally efficient..
+%% only base 2 of Mantissa encoding!
+%% only base 2 of ExpBase encoding!
+encode_real({Man, Base, Exp}) ->
+%% io:format("Mantissa: ~w Base: ~w, Exp: ~w~n",[Man, Base, Exp]),
+
+ OctExp = if Exp >= 0 -> list_to_binary(encode_integer_pos(Exp, []));
+ true -> list_to_binary(encode_integer_neg(Exp, []))
+ end,
+%% ok = io:format("OctExp: ~w~n",[OctExp]),
+ SignBit = if Man > 0 -> 0; % bit 7 is pos or neg, no Zeroval
+ true -> 1
+ end,
+%% ok = io:format("SignBitMask: ~w~n",[SignBitMask]),
+ InBase = if Base =:= 2 -> 0; % bit 6,5: only base 2 this far!
+ true ->
+ exit({error,{asn1, {encode_real_non_supported_encodeing, Base}}})
+ end,
+ SFactor = 0, % bit 4,3: no scaling since only base 2
+ OctExpLen = size(OctExp),
+ if OctExpLen > 255 ->
+ exit({error,{asn1, {to_big_exp_in_encode_real, OctExpLen}}});
+ true -> true %% make real assert later..
+ end,
+ {LenCode, EOctets} = case OctExpLen of % bit 2,1
+ 1 -> {0, OctExp};
+ 2 -> {1, OctExp};
+ 3 -> {2, OctExp};
+ _ -> {3, <<OctExpLen, OctExp/binary>>}
+ end,
+ FirstOctet = <<1:1,SignBit:1,InBase:2,SFactor:2,LenCode:2>>,
+ OctMantissa = if Man > 0 -> list_to_binary(minimum_octets(Man));
+ true -> list_to_binary(minimum_octets(-(Man))) % signbit keeps track of sign
+ end,
+ %% ok = io:format("LenMask: ~w EOctets: ~w~nFirstOctet: ~w OctMantissa: ~w OctExpLen: ~w~n", [LenMask, EOctets, FirstOctet, OctMantissa, OctExpLen]),
+ Bin = <<FirstOctet/binary, EOctets/binary, OctMantissa/binary>>,
+ {Bin, size(Bin)}.
+
+
+%%============================================================================
+%% decode real value
+%%
+%% decode_real([OctetBufferList], tuple|value, tag|notag) ->
+%% {{Mantissa, Base, Exp} | realval | PLUS-INFINITY | MINUS-INFINITY | 0,
+%% RestBuff}
+%%
+%% only for base 2 decoding so far!!
+%%============================================================================
+
+decode_real(Tlv, Form, Tags) ->
+ Buffer = match_tags(Tlv,Tags),
+ decode_real_notag(Buffer, Form).
+
+decode_real_notag(_Buffer, _Form) ->
+ exit({error,{asn1, {unimplemented,real}}}).
+%% decode_real2(Buffer, Form, size(Buffer)).
+
+% decode_real2(Buffer, Form, Len) ->
+% <<First, Buffer2/binary>> = Buffer,
+% if
+% First =:= 2#01000000 -> {'PLUS-INFINITY', Buffer2};
+% First =:= 2#01000001 -> {'MINUS-INFINITY', Buffer2};
+% First =:= 2#00000000 -> {0, Buffer2};
+% true ->
+% %% have some check here to verify only supported bases (2)
+% <<B7:1,B6:1,B5_4:2,B3_2:2,B1_0:2>> = <<First>>,
+% Sign = B6,
+% Base =
+% case B5_4 of
+% 0 -> 2; % base 2, only one so far
+% _ -> exit({error,{asn1, {non_supported_base, First}}})
+% end,
+% ScalingFactor =
+% case B3_2 of
+% 0 -> 0; % no scaling so far
+% _ -> exit({error,{asn1, {non_supported_scaling, First}}})
+% end,
+
+% {FirstLen,Exp,Buffer3} =
+% case B1_0 of
+% 0 ->
+% <<_:1/unit:8,Buffer21/binary>> = Buffer2,
+% {2, decode_integer2(1, Buffer2),Buffer21};
+% 1 ->
+% <<_:2/unit:8,Buffer21/binary>> = Buffer2,
+% {3, decode_integer2(2, Buffer2)};
+% 2 ->
+% <<_:3/unit:8,Buffer21/binary>> = Buffer2,
+% {4, decode_integer2(3, Buffer2)};
+% 3 ->
+% <<ExpLen1,RestBuffer/binary>> = Buffer2,
+% <<_:ExpLen1/unit:8,RestBuffer2/binary>> = RestBuffer,
+% { ExpLen1 + 2,
+% decode_integer2(ExpLen1, RestBuffer, RemBytes1),
+% RestBuffer2}
+% end,
+% Length = Len - FirstLen,
+% <<LongInt:Length/unit:8,RestBuff/binary>> = Buffer3,
+% {Mantissa, Buffer4} =
+% if Sign =:= 0 ->
+
+% {LongInt, RestBuff};% sign plus,
+% true ->
+
+% {-LongInt, RestBuff}% sign minus
+% end,
+% case Form of
+% tuple ->
+% {Val,Buf,RemB} = Exp,
+% {{Mantissa, Base, {Val,Buf}}, Buffer4, RemBytes2+RemBytes3};
+% _value ->
+% comming
+% end
+% end.
+
+
+%%============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.6
+%%
+%% encode bitstring value
+%%
+%% bitstring NamedBitList
+%% Val can be one of:
+%% - [identifiers] where only the named identifiers are set to one;
+%% the Constraint must then carry some information about the
+%% bit length.
+%% - [list of ones and zeroes] giving all the bits explicitly
+%% - an integer value representing the bit list
+%% C is the constraint length, only valid when identifiers are used
+%%============================================================================
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList,TagIn) when integer(Unused), binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList,TagIn);
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList, TagIn) when atom(FirstVal) ->
+ encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, TagIn);
+
+encode_bit_string(C, [{bit,X} | RestVal], NamedBitList, TagIn) ->
+ encode_bit_string_named(C, [{bit,X} | RestVal], NamedBitList, TagIn);
+
+encode_bit_string(C, [FirstVal| RestVal], NamedBitList, TagIn) when integer(FirstVal) ->
+ encode_bit_string_bits(C, [FirstVal | RestVal], NamedBitList, TagIn);
+
+encode_bit_string(_C, 0, _NamedBitList, TagIn) ->
+ encode_tags(TagIn, <<0>>,1);
+
+encode_bit_string(_C, [], _NamedBitList, TagIn) ->
+ encode_tags(TagIn, <<0>>,1);
+
+encode_bit_string(C, IntegerVal, NamedBitList, TagIn) when integer(IntegerVal) ->
+ BitListVal = int_to_bitlist(IntegerVal),
+ encode_bit_string_bits(C, BitListVal, NamedBitList, TagIn);
+
+encode_bit_string(C, {Name,BitList}, NamedBitList, TagIn) when atom(Name) ->
+ encode_bit_string(C, BitList, NamedBitList, TagIn).
+
+
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
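+%% Illustrative sketch (not part of the original code); the list is least
+%% significant bit first, i.e. bit position 0 comes first:
+%%   int_to_bitlist(5) -> [1,0,1]
+%%   int_to_bitlist(0) -> []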
+
+
+%%=================================================================
+%% Encode BIT STRING of the form {Unused,BinBits}.
+%% Unused is the number of unused bits in the last byte in BinBits
+%% and BinBits is a binary representing the BIT STRING.
+%%=================================================================
+encode_bin_bit_string(C,{Unused,BinBits},_NamedBitList,TagIn)->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ remove_unused_then_dotag(TagIn, Unused, BinBits);
+ {_Min,Max} ->
+ BBLen = (size(BinBits)*8)-Unused,
+ if
+ BBLen > Max ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBLen},{maximum,Max}}}}});
+ true ->
+ remove_unused_then_dotag(TagIn, Unused, BinBits)
+ end;
+ Size ->
+ case ((size(BinBits)*8)-Unused) of
+ BBSize when BBSize =< Size ->
+ remove_unused_then_dotag(TagIn, Unused, BinBits);
+ BBSize ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBSize},{should_be,Size}}}}})
+ end
+ end.
+
+remove_unused_then_dotag(TagIn,Unused,BinBits) ->
+ case Unused of
+ 0 when (size(BinBits) == 0) ->
+ encode_tags(TagIn,<<0>>,1);
+ 0 ->
+ Bin = <<Unused,BinBits/binary>>,
+ encode_tags(TagIn,Bin,size(Bin));
+ Num ->
+ N = (size(BinBits)-1),
+ <<BBits:N/binary,LastByte>> = BinBits,
+ encode_tags(TagIn,
+ [Unused,binary_to_list(BBits) ++[(LastByte bsr Num) bsl Num]],
+ 1+size(BinBits))
+ end.
+
+
+%%=================================================================
+%% Encode named bits
+%%=================================================================
+
+encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, TagIn) ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal], NamedBitList, []),
+ Size =
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ lists:max(ToSetPos)+1;
+ {_Min,Max} ->
+ Max;
+ TSize ->
+ TSize
+ end,
+ BitList = make_and_set_list(Size, ToSetPos, 0),
+ {Len, Unused, OctetList} = encode_bitstring(BitList),
+ encode_tags(TagIn, [Unused|OctetList],Len+1).
+
+
+%%----------------------------------------
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+%%----------------------------------------
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+get_all_bitposes([Val | Rest], NamedBitList, Ack) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+
+%%----------------------------------------
+%% make_and_set_list(Len of list to return, [list of positions to set to 1])->
+%% returns a list of length Len, with all positions in SetPos set to 1.
+%% Positions are zero-based: the first element is position 0, the second
+%% is position 1, and so on; the result has length Len, not Len + 1.
+%% BitList = make_and_set_list(C, ToSetPos, 0),
+%%----------------------------------------
+
+make_and_set_list(0, [], _) -> [];
+make_and_set_list(0, _, _) ->
+ exit({error,{asn1,bitstring_sizeconstraint}});
+make_and_set_list(Len, [XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(Len - 1, SetPos, XPos + 1)];
+make_and_set_list(Len, [Pos|SetPos], XPos) ->
+ [0 | make_and_set_list(Len - 1, [Pos | SetPos], XPos + 1)];
+make_and_set_list(Len, [], XPos) ->
+ [0 | make_and_set_list(Len - 1, [], XPos + 1)].
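+%% Illustrative sketch (not part of the original code):
+%%   make_and_set_list(4, [1,3], 0) -> [0,1,0,1]
+%% i.e. a list of length 4 with positions 1 and 3 set to one.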
+
+
+
+
+
+
+%%=================================================================
+%% Encode bit string for lists of ones and zeroes
+%%=================================================================
+encode_bit_string_bits(C, BitListVal, _NamedBitList, TagIn) when list(BitListVal) ->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused | OctetList], Len+1);
+ Constr={Min,Max} when integer(Min),integer(Max) ->
+ encode_constr_bit_str_bits(Constr,BitListVal,TagIn);
+ {Constr={_,_},[]} ->%Constr={Min,Max}
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,TagIn);
+ Constr={{_,_},{_,_}} ->%{{Min1,Max1},{Min2,Max2}}
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,TagIn);
+ Size ->
+ case length(BitListVal) of
+ BitSize when BitSize == Size ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused | OctetList], Len+1);
+ BitSize when BitSize < Size ->
+ PaddedList = pad_bit_list(Size-BitSize,BitListVal),
+ {Len, Unused, OctetList} = encode_bitstring(PaddedList),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused | OctetList], Len+1);
+ BitSize ->
+ exit({error,{asn1,
+ {bitstring_length, {{was,BitSize},{should_be,Size}}}}})
+ end
+
+ end.
+
+encode_constr_bit_str_bits({_Min,Max},BitListVal,TagIn) ->
+ BitLen = length(BitListVal),
+ if
+ BitLen > Max ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max}}}}});
+ true ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused, OctetList], Len+1)
+ end;
+encode_constr_bit_str_bits({{_Min1,Max1},{Min2,Max2}},BitListVal,TagIn) ->
+ BitLen = length(BitListVal),
+ case BitLen of
+ Len when Len > Max2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max2}}}}});
+ Len when Len > Max1, Len < Min2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {not_allowed_interval,
+ Max1,Min2}}}}});
+ _ ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused, OctetList], Len+1)
+ end.
+
+%% returns a list of length Size + length(BitListVal), with BitListVal
+%% as the most significant elements followed by padded zero elements
+pad_bit_list(Size,BitListVal) ->
+ Tail = lists:duplicate(Size,0),
+ lists:append(BitListVal,Tail).
+
+%%=================================================================
+%% Do the actual encoding
+%% ([bitlist]) -> {ListLen, UnusedBits, OctetList}
+%%=================================================================
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest]) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Val], 1);
+encode_bitstring(Val) ->
+ {Unused, Octet} = unused_bitlist(Val, 7, 0),
+ {1, Unused, [Octet]}.
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest], Ack, Len) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Ack | [Val]], Len + 1);
+%%even multiple of 8 bits..
+encode_bitstring([], Ack, Len) ->
+ {Len, 0, Ack};
+%% unused bits in last octet
+encode_bitstring(Rest, Ack, Len) ->
+% io:format("uneven ~w ~w ~w~n",[Rest, Ack, Len]),
+ {Unused, Val} = unused_bitlist(Rest, 7, 0),
+ {Len + 1, Unused, [Ack | [Val]]}.
+
+%%%%%%%%%%%%%%%%%%
+%% unused_bitlist([list of up to 7 ones and zeros], 7, 0) ->
+%% {UnusedBits, Last octet with the bits left-aligned (most significant first)}
+unused_bitlist([], Trail, Ack) ->
+ {Trail + 1, Ack};
+unused_bitlist([Bit | Rest], Trail, Ack) ->
+%% io:format("trail Bit: ~w Rest: ~w Trail: ~w Ack:~w~n",[Bit, Rest, Trail, Ack]),
+ unused_bitlist(Rest, Trail - 1, (Bit bsl Trail) bor Ack).
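+%% Illustrative sketch (not part of the original code):
+%%   encode_bitstring([1,0,1,1]) -> {1,4,[2#10110000]}
+%% one octet, four unused (trailing) bits, bits left-aligned in the octet.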
+
+
+%%============================================================================
+%% decode bitstring value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%============================================================================
+
+decode_compact_bit_string(Buffer, Range, NamedNumberList, Tags) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags,
+ NamedNumberList,bin).
+
+decode_bit_string(Buffer, Range, NamedNumberList, Tags) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags,
+ NamedNumberList,old).
+
+
+decode_bit_string2(<<0>>,_NamedNumberList,BinOrOld) ->
+ case BinOrOld of
+ bin ->
+ {0,<<>>};
+ _ ->
+ []
+ end;
+decode_bit_string2(<<Unused,Bits/binary>>,NamedNumberList,BinOrOld) ->
+ case NamedNumberList of
+ [] ->
+ case BinOrOld of
+ bin ->
+ {Unused,Bits};
+ _ ->
+ decode_bitstring2(size(Bits), Unused, Bits)
+ end;
+ _ ->
+ BitString = decode_bitstring2(size(Bits), Unused, Bits),
+ decode_bitstring_NNL(BitString,NamedNumberList)
+ end.
+
+%%----------------------------------------
+%% Decode the in buffer to bits
+%%----------------------------------------
+decode_bitstring2(1,Unused,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,_/binary>>) ->
+ lists:sublist([B7,B6,B5,B4,B3,B2,B1,B0],8-Unused);
+decode_bitstring2(Len, Unused,
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Buffer/binary>>) ->
+ [B7, B6, B5, B4, B3, B2, B1, B0 |
+ decode_bitstring2(Len - 1, Unused, Buffer)].
+
+%%decode_bitstring2(1, Unused, Buffer) ->
+%% make_bits_of_int(hd(Buffer), 128, 8-Unused);
+%%decode_bitstring2(Len, Unused, [BitVal | Buffer]) ->
+%% [B7, B6, B5, B4, B3, B2, B1, B0] = make_bits_of_int(BitVal, 128, 8),
+%% [B7, B6, B5, B4, B3, B2, B1, B0 |
+%% decode_bitstring2(Len - 1, Unused, Buffer)].
+
+
+%%make_bits_of_int(_, _, 0) ->
+%% [];
+%%make_bits_of_int(BitVal, MaskVal, Unused) when Unused > 0 ->
+%% X = case MaskVal band BitVal of
+%% 0 -> 0 ;
+%% _ -> 1
+%% end,
+%% [X | make_bits_of_int(BitVal, MaskVal bsr 1, Unused - 1)].
+
+
+
+%%----------------------------------------
+%% Decode the bitlist to names
+%%----------------------------------------
+
+
+decode_bitstring_NNL(BitList,NamedNumberList) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,0,[]).
+
+
+decode_bitstring_NNL([],_,_No,Result) ->
+ lists:reverse(Result);
+
+decode_bitstring_NNL([B|BitList],[{Name,No}|NamedNumberList],No,Result) ->
+ if
+ B == 0 ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result);
+ true ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[Name|Result])
+ end;
+decode_bitstring_NNL([1|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[{bit,No}|Result]);
+decode_bitstring_NNL([0|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result).
+
+
+%%============================================================================
+%% Octet string, ITU_T X.690 Chapter 8.7
+%%
+%% encode octet string
+%% The OctetList must be a flat list of integers in the range 0..255
+%% the function does not check this because it takes too much time
+%%============================================================================
+encode_octet_string(_C, OctetList, TagIn) when binary(OctetList) ->
+ encode_tags(TagIn, OctetList, size(OctetList));
+encode_octet_string(_C, OctetList, TagIn) when list(OctetList) ->
+ encode_tags(TagIn, OctetList, length(OctetList));
+encode_octet_string(C, {Name,OctetList}, TagIn) when atom(Name) ->
+ encode_octet_string(C, OctetList, TagIn).
+
+
+%%============================================================================
+%% decode octet string
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%
+%% Octet string is decoded as a restricted string
+%%============================================================================
+decode_octet_string(Buffer, Range, Tags) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_OCTET_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_OCTET_STRING,
+ Tags, [], old).
+
+%%============================================================================
+%% Null value, ITU_T X.690 Chapter 8.8
+%%
+%% encode NULL value
+%%============================================================================
+
+encode_null({Name, _Val}, TagIn) when atom(Name) ->
+ encode_tags(TagIn, [], 0);
+encode_null(_Val, TagIn) ->
+ encode_tags(TagIn, [], 0).
+
+%%============================================================================
+%% decode NULL value
+%% (Buffer, HasTag, TotalLen) -> {NULL, Remain, RemovedBytes}
+%%============================================================================
+
+decode_null(Tlv, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ case Val of
+ <<>> ->
+ 'NULL';
+ _ ->
+ exit({error,{asn1,{decode_null,Val}}})
+ end.
+
+%%============================================================================
+%% Object identifier, ITU_T X.690 Chapter 8.19
+%%
+%% encode Object Identifier value
+%%============================================================================
+
+encode_object_identifier({Name,Val}, TagIn) when atom(Name) ->
+ encode_object_identifier(Val, TagIn);
+encode_object_identifier(Val, TagIn) ->
+ encode_tags(TagIn, e_object_identifier(Val)).
+
+e_object_identifier({'OBJECT IDENTIFIER', V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname, V}) when atom(Cname), tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname, V}) when atom(Cname), list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%%%%%%%%%%%%%%%
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+e_object_identifier([E1, E2 | Tail]) ->
+ Head = 40*E1 + E2, % wow!
+ {H,Lh} = mk_object_val(Head),
+ {R,Lr} = enc_obj_id_tail(Tail, [], 0),
+ {[H|R], Lh+Lr}.
+
+enc_obj_id_tail([], Ack, Len) ->
+ {lists:reverse(Ack), Len};
+enc_obj_id_tail([H|T], Ack, Len) ->
+ {B, L} = mk_object_val(H),
+ enc_obj_id_tail(T, [B|Ack], Len+L).
+
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+%%e_object_identifier([E1, E2 | Tail]) ->
+%% Head = 40*E1 + E2, % wow!
+%% F = fun(Val, AckLen) ->
+%% {L, Ack} = mk_object_val(Val),
+%% {L, Ack + AckLen}
+%% end,
+%% {Octets, Len} = lists:mapfoldl(F, 0, [Head | Tail]).
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets; the 8th (most significant) bit is always
+%% set to one except in the last octet, where it is 0
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
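+%% Illustrative sketch (not part of the original code), base-128 encoding
+%% with continuation bits:
+%%   mk_object_val(127) -> {[127],1}
+%%   mk_object_val(128) -> {[16#81,16#00],2}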
+
+
+
+%%============================================================================
+%% decode Object Identifier value
+%% (Buffer, HasTag, TotalLen) -> {{ObjId}, Remain, RemovedBytes}
+%%============================================================================
+
+decode_object_identifier(Tlv, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ [AddedObjVal|ObjVals] = dec_subidentifiers(Val,0,[]),
+ {Val1, Val2} = if
+ AddedObjVal < 40 ->
+ {0, AddedObjVal};
+ AddedObjVal < 80 ->
+ {1, AddedObjVal - 40};
+ true ->
+ {2, AddedObjVal - 80}
+ end,
+ list_to_tuple([Val1, Val2 | ObjVals]).
+
+dec_subidentifiers(<<>>,_Av,Al) ->
+ lists:reverse(Al);
+dec_subidentifiers(<<1:1,H:7,T/binary>>,Av,Al) ->
+ dec_subidentifiers(T,(Av bsl 7) + H,Al);
+dec_subidentifiers(<<H,T/binary>>,Av,Al) ->
+ dec_subidentifiers(T,0,[((Av bsl 7) + H)|Al]).
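+%% Illustrative sketch (not part of the original code), assuming an already
+%% tag-stripped value (Tags = []) holding the OID 1.3.6.1:
+%%   decode_object_identifier(<<43,6,1>>, []) -> {1,3,6,1}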
+
+
+%%============================================================================
+%% Restricted character string types, ITU_T X.690 Chapter 8.20
+%%
+%% encode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%%============================================================================
+%% The StringType arg is kept for future use but might be removed
+encode_restricted_string(_C, OctetList, _StringType, TagIn)
+ when binary(OctetList) ->
+ encode_tags(TagIn, OctetList, size(OctetList));
+encode_restricted_string(_C, OctetList, _StringType, TagIn)
+ when list(OctetList) ->
+ encode_tags(TagIn, OctetList, length(OctetList));
+encode_restricted_string(C,{Name,OctetL}, StringType, TagIn) when atom(Name)->
+ encode_restricted_string(C, OctetL, StringType, TagIn).
+
+%%============================================================================
+%% decode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_restricted_string(Buffer, Range, StringType, Tags) ->
+ decode_restricted_string(Buffer, Range, StringType, Tags, [], old).
+
+
+decode_restricted_string(Tlv, Range, StringType, TagsIn,
+ NamedNumberList, BinOrOld) ->
+ Val = match_tags(Tlv, TagsIn),
+ Val2 =
+ case Val of
+ PartList = [_H|_T] -> % constructed val
+ Bin = collect_parts(PartList),
+ decode_restricted(Bin, StringType,
+ NamedNumberList, BinOrOld);
+ Bin ->
+ decode_restricted(Bin, StringType,
+ NamedNumberList, BinOrOld)
+ end,
+ check_and_convert_restricted_string(Val2,StringType,Range,NamedNumberList,BinOrOld).
+
+
+
+% case StringType of
+% ?N_BIT_STRING when BinOrOld == bin ->
+% {concat_bit_binaries(AccVal, Val), AccRb+Rb};
+% _ when binary(Val),binary(AccVal) ->
+% {<<AccVal/binary,Val/binary>>,AccRb+Rb};
+% _ when binary(Val), AccVal==[] ->
+% {Val,AccRb+Rb};
+% _ ->
+% {AccVal++Val, AccRb+Rb}
+% end,
+
+
+
+decode_restricted(Bin, StringType, NamedNumberList,BinOrOld) ->
+ case StringType of
+ ?N_BIT_STRING ->
+ decode_bit_string2(Bin, NamedNumberList, BinOrOld);
+ ?N_UniversalString ->
+ mk_universal_string(binary_to_list(Bin));
+ ?N_BMPString ->
+ mk_BMP_string(binary_to_list(Bin));
+ _ ->
+ Bin
+ end.
+
+
+check_and_convert_restricted_string(Val,StringType,Range,NamedNumberList,_BinOrOld) ->
+ {StrLen,NewVal} = case StringType of
+ ?N_BIT_STRING when NamedNumberList /= [] ->
+ {no_check,Val};
+ ?N_BIT_STRING when list(Val) ->
+ {length(Val),Val};
+ ?N_BIT_STRING when tuple(Val) ->
+ {(size(element(2,Val))*8) - element(1,Val),Val};
+ _ when binary(Val) ->
+ {size(Val),binary_to_list(Val)};
+ _ when list(Val) ->
+ {length(Val), Val}
+ end,
+ case Range of
+ _ when StrLen == no_check ->
+ NewVal;
+ [] -> % No length constraint
+ NewVal;
+ {Lb,Ub} when StrLen >= Lb, Ub >= StrLen -> % variable length constraint
+ NewVal;
+ {{Lb,_Ub},[]} when StrLen >= Lb ->
+ NewVal;
+ {{Lb1,Ub1},{Lb2,Ub2}} when StrLen >= Lb1, StrLen =< Ub1;
+ StrLen =< Ub2, StrLen >= Lb2 ->
+ NewVal;
+ StrLen -> % fixed length constraint
+ NewVal;
+ {_,_} ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _Len when integer(_Len) ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _ -> % some strange constraint that we don't support yet
+ NewVal
+ end.
+
+
+%%============================================================================
+%% encode Universal string
+%%============================================================================
+
+encode_universal_string(C, {Name, Universal}, TagIn) when atom(Name) ->
+ encode_universal_string(C, Universal, TagIn);
+encode_universal_string(_C, Universal, TagIn) ->
+ OctetList = mk_uni_list(Universal),
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+mk_uni_list(In) ->
+ mk_uni_list(In,[]).
+
+mk_uni_list([],List) ->
+ lists:reverse(List);
+mk_uni_list([{A,B,C,D}|T],List) ->
+ mk_uni_list(T,[D,C,B,A|List]);
+mk_uni_list([H|T],List) ->
+ mk_uni_list(T,[H,0,0,0|List]).
+
+%%===========================================================================
+%% decode Universal strings
+%% (Buffer, Range, StringType, HasTag, LenIn) ->
+%% {String, Remain, RemovedBytes}
+%%===========================================================================
+
+decode_universal_string(Buffer, Range, Tags) ->
+ decode_restricted_string(Buffer, Range, ?N_UniversalString,
+ Tags, [], old).
+
+
+mk_universal_string(In) ->
+ mk_universal_string(In,[]).
+
+mk_universal_string([],Acc) ->
+ lists:reverse(Acc);
+mk_universal_string([0,0,0,D|T],Acc) ->
+ mk_universal_string(T,[D|Acc]);
+mk_universal_string([A,B,C,D|T],Acc) ->
+ mk_universal_string(T,[{A,B,C,D}|Acc]).
+
+
+%%============================================================================
+%% encode BMP string
+%%============================================================================
+
+encode_BMP_string(C, {Name,BMPString}, TagIn) when atom(Name)->
+ encode_BMP_string(C, BMPString, TagIn);
+encode_BMP_string(_C, BMPString, TagIn) ->
+ OctetList = mk_BMP_list(BMPString),
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+mk_BMP_list(In) ->
+ mk_BMP_list(In,[]).
+
+mk_BMP_list([],List) ->
+ lists:reverse(List);
+mk_BMP_list([{0,0,C,D}|T],List) ->
+ mk_BMP_list(T,[D,C|List]);
+mk_BMP_list([H|T],List) ->
+ mk_BMP_list(T,[H,0|List]).
+
+%%============================================================================
+%% decode (OctetList, Range(ignored), tag|notag) -> {ValList, RestList}
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+decode_BMP_string(Buffer, Range, Tags) ->
+ decode_restricted_string(Buffer, Range, ?N_BMPString,
+ Tags, [], old).
+
+mk_BMP_string(In) ->
+ mk_BMP_string(In,[]).
+
+mk_BMP_string([],US) ->
+ lists:reverse(US);
+mk_BMP_string([0,B|T],US) ->
+ mk_BMP_string(T,[B|US]);
+mk_BMP_string([C,D|T],US) ->
+ mk_BMP_string(T,[{0,0,C,D}|US]).
+
+
+%%============================================================================
+%% Generalized time, ITU_T X.680 Chapter 39
+%%
+%% encode Generalized time
+%%============================================================================
+
+encode_generalized_time(C, {Name,OctetList}, TagIn) when atom(Name) ->
+ encode_generalized_time(C, OctetList, TagIn);
+encode_generalized_time(_C, OctetList, TagIn) ->
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+%%============================================================================
+%% decode Generalized time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_generalized_time(Tlv, _Range, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ NewVal = case Val of
+ PartList = [_H|_T] -> % constructed
+ collect_parts(PartList);
+ Bin ->
+ Bin
+ end,
+ binary_to_list(NewVal).
+
+%%============================================================================
+%% Universal time, ITU_T X.680 Chapter 40
+%%
+%% encode UTC time
+%%============================================================================
+
+encode_utc_time(C, {Name,OctetList}, TagIn) when atom(Name) ->
+ encode_utc_time(C, OctetList, TagIn);
+encode_utc_time(_C, OctetList, TagIn) ->
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+%%============================================================================
+%% decode UTC time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_utc_time(Tlv, _Range, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ NewVal = case Val of
+ PartList = [_H|_T] -> % constructed
+ collect_parts(PartList);
+ Bin ->
+ Bin
+ end,
+ binary_to_list(NewVal).
+
+
+%%============================================================================
+%% Length handling
+%%
+%% Encode length
+%%
+%% encode_length(Int | indefinite) ->
+%% [<127]| [128 + Int (<127),OctetList] | [16#80]
+%%============================================================================
+
+encode_length(indefinite) ->
+ {[16#80],1}; % 128
+encode_length(L) when L =< 16#7F ->
+ {[L],1};
+encode_length(L) ->
+ Oct = minimum_octets(L),
+ Len = length(Oct),
+ if
+ Len =< 126 ->
+ {[ (16#80+Len) | Oct ],Len+1};
+ true ->
+ exit({error,{asn1, to_long_length_oct, Len}})
+ end.
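+%% Illustrative sketch (not part of the original code):
+%%   encode_length(5)          -> {[5],1}
+%%   encode_length(500)        -> {[16#82,1,244],3}
+%%   encode_length(indefinite) -> {[16#80],1}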
+
+
+%% Val must be >= 0
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(0,Acc) ->
+ Acc;
+minimum_octets(Val, Acc) ->
+ minimum_octets((Val bsr 8),[Val band 16#FF | Acc]).
+
+
+%%===========================================================================
+%% Decode length
+%%
+%% decode_length(OctetList) -> {{indefinite, RestOctetsL}, NoRemovedBytes} |
+%% {{Length, RestOctetsL}, NoRemovedBytes}
+%%===========================================================================
+
+decode_length(<<1:1,0:7,T/binary>>) ->
+ {indefinite, T};
+decode_length(<<0:1,Length:7,T/binary>>) ->
+ {Length,T};
+decode_length(<<1:1,LL:7,T/binary>>) ->
+ <<Length:LL/unit:8,Rest/binary>> = T,
+ {Length,Rest}.
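+%% Illustrative sketch (not part of the original code):
+%%   decode_length(<<5,1,2,3,4,5>>) -> {5,<<1,2,3,4,5>>}
+%%   decode_length(<<16#82,1,244>>) -> {500,<<>>}
+%%   decode_length(<<16#80,2,1,5>>) -> {indefinite,<<2,1,5>>}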
+
+
+
+%%-------------------------------------------------------------------------
+%% INTERNAL HELPER FUNCTIONS (not exported)
+%%-------------------------------------------------------------------------
+
+
+%% decoding positive integer values.
+decode_integer2(Len,Bin = <<0:1,_:7,_Bs/binary>>) ->
+ <<Int:Len/unit:8>> = Bin,
+ Int;
+%% decoding negative integer values.
+decode_integer2(Len,<<1:1,B2:7,Bs/binary>>) ->
+ <<N:Len/unit:8>> = <<B2,Bs/binary>>,
+ Int = N - (1 bsl (8 * Len - 1)),
+ Int.
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+collect_parts(TlvList) ->
+ collect_parts(TlvList,[]).
+
+collect_parts([{_,L}|Rest],Acc) when list(L) ->
+ collect_parts(Rest,[collect_parts(L)|Acc]);
+collect_parts([{?N_BIT_STRING,<<Unused,Bits/binary>>}|Rest],_Acc) ->
+ collect_parts_bit(Rest,[Bits],Unused);
+collect_parts([{_T,V}|Rest],Acc) ->
+ collect_parts(Rest,[V|Acc]);
+collect_parts([],Acc) ->
+ list_to_binary(lists:reverse(Acc)).
+
+collect_parts_bit([{?N_BIT_STRING,<<Unused,Bits/binary>>}|Rest],Acc,Uacc) ->
+ collect_parts_bit(Rest,[Bits|Acc],Unused+Uacc);
+collect_parts_bit([],Acc,Uacc) ->
+ list_to_binary([Uacc|lists:reverse(Acc)]).
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_check.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_check.erl
new file mode 100644
index 0000000000..bd3d5e6d8b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_check.erl
@@ -0,0 +1,333 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_check.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt_check).
+
+-include("asn1_records.hrl").
+
+-export([check_bool/2,
+ check_int/3,
+ check_bitstring/3,
+ check_octetstring/2,
+ check_null/2,
+ check_objectidentifier/2,
+ check_objectdescriptor/2,
+ check_real/2,
+ check_enum/3,
+ check_restrictedstring/2]).
+
+-export([transform_to_EXTERNAL1990/1,
+ transform_to_EXTERNAL1994/1]).
+
+
+check_bool(_Bool,asn1_DEFAULT) ->
+ true;
+check_bool(Bool,Bool) when Bool == true; Bool == false ->
+ true;
+check_bool(_Bool1,Bool2) ->
+ throw({error,Bool2}).
+
+check_int(_,asn1_DEFAULT,_) ->
+ true;
+check_int(Value,Value,_) when integer(Value) ->
+ true;
+check_int(DefValue,Value,NNL) when atom(Value) ->
+ case lists:keysearch(Value,1,NNL) of
+ {value,{_,DefValue}} ->
+ true;
+ _ ->
+ throw({error,DefValue})
+ end;
+check_int(DefaultValue,_Value,_) ->
+ throw({error,DefaultValue}).
+
+% check_bitstring([H|T],[H|T],_) when integer(H) ->
+% true;
+% check_bitstring(V,V,_) when integer(V) ->
+% true;
+%% Two equal lists or integers
+check_bitstring(_,asn1_DEFAULT,_) ->
+ true;
+check_bitstring(V,V,_) ->
+ true;
+%% Default value as a list of 1 and 0 and user value as an integer
+check_bitstring(L=[H|T],Int,_) when integer(Int),integer(H) ->
+ case bit_list_to_int(L,length(T)) of
+ Int -> true;
+ _ -> throw({error,L,Int})
+ end;
+%% Default value as an integer, val as list
+check_bitstring(Int,Val,NBL) when integer(Int),list(Val) ->
+ BL = int_to_bit_list(Int,[],length(Val)),
+ check_bitstring(BL,Val,NBL);
+%% Default value and user value as lists of ones and zeros
+check_bitstring(L1=[H1|_T1],L2=[H2|_T2],NBL=[_H|_T]) when integer(H1),integer(H2) ->
+ L2new = remove_trailing_zeros(L2),
+ check_bitstring(L1,L2new,NBL);
+%% Default value as a list of 1 and 0 and user value as a list of atoms
+check_bitstring(L1=[H1|_T1],L2=[H2|_T2],NBL) when integer(H1),atom(H2) ->
+ case bit_list_to_nbl(L1,NBL,0,[]) of
+ L3 -> check_bitstring(L3,L2,NBL);
+ _ -> throw({error,L2})
+ end;
+%% Both default value and user value as a list of atoms
+check_bitstring(L1=[H1|T1],L2=[H2|_T2],_) when atom(H1),atom(H2) ->
+ length(L1) == length(L2),
+ case lists:member(H1,L2) of
+ true ->
+ check_bitstring1(T1,L2);
+ false -> throw({error,L2})
+ end;
+%% Default value as a list of atoms and user value as a list of 1 and 0
+check_bitstring(L1=[H1|_T1],L2=[H2|_T2],NBL) when atom(H1),integer(H2) ->
+ case bit_list_to_nbl(L2,NBL,0,[]) of
+ L3 ->
+ check_bitstring(L1,L3,NBL);
+ _ -> throw({error,L2})
+ end;
+%% User value in compact format
+check_bitstring(DefVal,CBS={_,_},NBL) ->
+ NewVal = cbs_to_bit_list(CBS),
+ check_bitstring(DefVal,NewVal,NBL);
+check_bitstring(DV,V,_) ->
+ throw({error,DV,V}).
+
+
+bit_list_to_int([0|Bs],ShL)->
+ bit_list_to_int(Bs,ShL-1) + 0;
+bit_list_to_int([1|Bs],ShL) ->
+ bit_list_to_int(Bs,ShL-1) + (1 bsl ShL);
+bit_list_to_int([],_) ->
+ 0.
+
+int_to_bit_list(0,Acc,0) ->
+ Acc;
+int_to_bit_list(Int,Acc,Len) ->
+ int_to_bit_list(Int bsr 1,[Int band 1|Acc],Len - 1).
+
+bit_list_to_nbl([0|T],NBL,Pos,Acc) ->
+ bit_list_to_nbl(T,NBL,Pos+1,Acc);
+bit_list_to_nbl([1|T],NBL,Pos,Acc) ->
+ case lists:keysearch(Pos,2,NBL) of
+ {value,{N,_}} ->
+ bit_list_to_nbl(T,NBL,Pos+1,[N|Acc]);
+ _ ->
+ throw({error,{no,named,element,at,pos,Pos}})
+ end;
+bit_list_to_nbl([],_,_,Acc) ->
+ Acc.
+
+remove_trailing_zeros(L2) ->
+ remove_trailing_zeros1(lists:reverse(L2)).
+remove_trailing_zeros1(L) ->
+ lists:reverse(lists:dropwhile(fun(0)->true;
+ (_) ->false
+ end,
+ L)).
+
+check_bitstring1([H|T],NBL) ->
+ case lists:member(H,NBL) of
+ true ->
+ check_bitstring1(T,NBL);
+ V -> throw({error,V})
+ end;
+check_bitstring1([],_) ->
+ true.
+
+cbs_to_bit_list({Unused,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>>}) when size(Rest) >= 1 ->
+ [B7,B6,B5,B4,B3,B2,B1,B0|cbs_to_bit_list({Unused,Rest})];
+cbs_to_bit_list({0,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1>>}) ->
+ [B7,B6,B5,B4,B3,B2,B1,B0];
+cbs_to_bit_list({Unused,Bin}) when size(Bin) == 1 ->
+ Used = 8-Unused,
+ <<Int:Used,_:Unused>> = Bin,
+ int_to_bit_list(Int,[],Used).
+
+
+check_octetstring(_,asn1_DEFAULT) ->
+ true;
+check_octetstring(L,L) ->
+ true;
+check_octetstring(L,Int) when list(L),integer(Int) ->
+ case integer_to_octetlist(Int) of
+ L -> true;
+ V -> throw({error,V})
+ end;
+check_octetstring(_,V) ->
+ throw({error,V}).
+
+integer_to_octetlist(Int) ->
+ integer_to_octetlist(Int,[]).
+integer_to_octetlist(0,Acc) ->
+ Acc;
+integer_to_octetlist(Int,Acc) ->
+ integer_to_octetlist(Int bsr 8,[(Int band 255)|Acc]).
+
+check_null(_,asn1_DEFAULT) ->
+ true;
+check_null('NULL','NULL') ->
+ true;
+check_null(_,V) ->
+ throw({error,V}).
+
+check_objectidentifier(_,asn1_DEFAULT) ->
+ true;
+check_objectidentifier(OI,OI) ->
+ true;
+check_objectidentifier(DOI,OI) when tuple(DOI),tuple(OI) ->
+ check_objectidentifier1(tuple_to_list(DOI),tuple_to_list(OI));
+check_objectidentifier(_,OI) ->
+ throw({error,OI}).
+
+check_objectidentifier1([V|Rest1],[V|Rest2]) ->
+ check_objectidentifier1(Rest1,Rest2,V);
+check_objectidentifier1([V1|Rest1],[V2|Rest2]) ->
+ case reserved_objectid(V2,[]) of
+ V1 ->
+ check_objectidentifier1(Rest1,Rest2,[V1]);
+ V ->
+ throw({error,V})
+ end.
+check_objectidentifier1([V|Rest1],[V|Rest2],Above) ->
+ check_objectidentifier1(Rest1,Rest2,[V|Above]);
+check_objectidentifier1([V1|Rest1],[V2|Rest2],Above) ->
+ case reserved_objectid(V2,Above) of
+ V1 ->
+ check_objectidentifier1(Rest1,Rest2,[V1|Above]);
+ V ->
+ throw({error,V})
+ end;
+check_objectidentifier1([],[],_) ->
+ true;
+check_objectidentifier1(_,V,_) ->
+ throw({error,object,identifier,V}).
+
+%% ITU-T Rec. X.680 Annex B - D
+reserved_objectid('itu-t',[]) -> 0;
+reserved_objectid('ccitt',[]) -> 0;
+%% arcs below "itu-t"
+reserved_objectid('recommendation',[0]) -> 0;
+reserved_objectid('question',[0]) -> 1;
+reserved_objectid('administration',[0]) -> 2;
+reserved_objectid('network-operator',[0]) -> 3;
+reserved_objectid('identified-organization',[0]) -> 4;
+
+reserved_objectid(iso,[]) -> 1;
+%% arcs below "iso", note that number 1 is not used
+reserved_objectid('standard',[1]) -> 0;
+reserved_objectid('member-body',[1]) -> 2;
+reserved_objectid('identified-organization',[1]) -> 3;
+
+reserved_objectid('joint-iso-itu-t',[]) -> 2;
+reserved_objectid('joint-iso-ccitt',[]) -> 2;
+
+reserved_objectid(_,_) -> false.
+
+
+check_objectdescriptor(_,asn1_DEFAULT) ->
+ true;
+check_objectdescriptor(OD,OD) ->
+ true;
+check_objectdescriptor(_,_) ->
+    throw({error,{not_implemented_yet,check_objectdescriptor}}).
+
+check_real(_,asn1_DEFAULT) ->
+ true;
+check_real(R,R) ->
+ true;
+check_real(_,_) ->
+ throw({error,{not_implemented_yet,check_real}}).
+
+check_enum(_,asn1_DEFAULT,_) ->
+ true;
+check_enum(Val,Val,_) ->
+ true;
+check_enum(Int,Atom,Enumerations) when integer(Int),atom(Atom) ->
+ case lists:keysearch(Atom,1,Enumerations) of
+ {value,{_,Int}} -> true;
+ _ -> throw({error,{enumerated,Int,Atom}})
+ end;
+check_enum(DefVal,Val,_) ->
+ throw({error,{enumerated,DefVal,Val}}).
+
+
+check_restrictedstring(_,asn1_DEFAULT) ->
+ true;
+check_restrictedstring(Val,Val) ->
+ true;
+check_restrictedstring([V|Rest1],[V|Rest2]) ->
+ check_restrictedstring(Rest1,Rest2);
+check_restrictedstring([V1|Rest1],[V2|Rest2]) ->
+ check_restrictedstring(V1,V2),
+ check_restrictedstring(Rest1,Rest2);
+%% tuple format of value
+check_restrictedstring({V1,V2},[V1,V2]) ->
+ true;
+check_restrictedstring([V1,V2],{V1,V2}) ->
+ true;
+%% quadruple format of value
+check_restrictedstring({V1,V2,V3,V4},[V1,V2,V3,V4]) ->
+ true;
+check_restrictedstring([V1,V2,V3,V4],{V1,V2,V3,V4}) ->
+ true;
+%% character string list
+check_restrictedstring(V1,V2) when list(V1),tuple(V2) ->
+ check_restrictedstring(V1,tuple_to_list(V2));
+check_restrictedstring(V1,V2) ->
+ throw({error,{restricted,string,V1,V2}}).
+
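+%% transform_to_EXTERNAL1990/1 and transform_to_EXTERNAL1994/1 convert values
+%% of the ASN.1 EXTERNAL type between the 1994 form
+%% {'EXTERNAL',Identification,DataValueDescriptor,DataValue}, where
+%% Identification is the CHOICE {syntax,_} | {'presentation-context-id',_} |
+%% {'context-negotiation',_}, and the 1990 form that carries separate
+%% direct-reference and indirect-reference fields.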
+transform_to_EXTERNAL1990(Val) when tuple(Val),size(Val) == 4 ->
+ transform_to_EXTERNAL1990(tuple_to_list(Val),[]);
+transform_to_EXTERNAL1990(Val) when tuple(Val) ->
+ %% Data already in ASN1 1990 format
+ Val.
+
+transform_to_EXTERNAL1990(['EXTERNAL'|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,['EXTERNAL'|Acc]);
+transform_to_EXTERNAL1990([{syntax,Syntax}|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,[asn1_NOVALUE,Syntax|Acc]);
+transform_to_EXTERNAL1990([{'presentation-context-id',PCid}|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,[PCid,asn1_NOVALUE|Acc]);
+transform_to_EXTERNAL1990([{'context-negotiation',Context_negot}|Rest],Acc) ->
+ {_,Presentation_Cid,Transfer_syntax} = Context_negot,
+ transform_to_EXTERNAL1990(Rest,[Transfer_syntax,Presentation_Cid|Acc]);
+transform_to_EXTERNAL1990([asn1_NOVALUE|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,[asn1_NOVALUE|Acc]);
+transform_to_EXTERNAL1990([Data_val_desc,Data_value],Acc) when list(Data_value)->
+ list_to_tuple(lists:reverse([{'octet-aligned',Data_value},
+ Data_val_desc|Acc]));
+transform_to_EXTERNAL1990([Data_value],Acc) when list(Data_value)->
+ list_to_tuple(lists:reverse([{'octet-aligned',Data_value}|Acc])).
+
+
+transform_to_EXTERNAL1994(V={'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}) ->
+ Identification =
+ case {DRef,IndRef} of
+ {DRef,asn1_NOVALUE} ->
+ {syntax,DRef};
+ {asn1_NOVALUE,IndRef} ->
+ {'presentation-context-id',IndRef};
+ _ ->
+ {'context-negotiation',
+ {'EXTERNAL_identification_context-negotiation',IndRef,DRef}}
+ end,
+ case Encoding of
+ {_,Val} when list(Val) ->
+ {'EXTERNAL',Identification,Data_v_desc,Val};
+ _ ->
+ V
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_driver_handler.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_driver_handler.erl
new file mode 100644
index 0000000000..7a986b5376
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_driver_handler.erl
@@ -0,0 +1,108 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_driver_handler.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+
+-module(asn1rt_driver_handler).
+
+-export([init/1,load_driver/0,unload_driver/0]).
+
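+%% Helper process for the asn1_erl_drv linked-in driver: load_driver/0 spawns
+%% init/1, which loads the driver, opens a port registered as drv_complete
+%% and registers itself as asn1_driver_owner; unload_driver/0 asks that
+%% process to close the port and unload the driver.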
+
+load_driver() ->
+ spawn(asn1rt_driver_handler, init, [self()]).
+
+init(From) ->
+ Port=
+ case load_driver("asn1_erl_drv") of
+ ok ->
+ open_named_port(From);
+ already_done ->
+ From ! driver_ready;
+ Error -> % if erl_ddll:load_driver fails
+ erl_ddll:unload_driver("asn1_erl_drv"),
+ From ! Error
+ end,
+ register_and_loop(Port).
+
+load_driver(DriverName) ->
+ case is_driver_loaded(DriverName) of
+ false ->
+ Dir = filename:join([code:priv_dir(asn1),"lib"]),
+ erl_ddll:load_driver(Dir,DriverName);
+ true ->
+ ok
+ end.
+
+
+is_driver_loaded(_Name) ->
+ case whereis(asn1_driver_owner) of
+ undefined ->
+ false;
+ _ ->
+ true
+ end.
+
+open_named_port(From) ->
+ case is_port_open(drv_complete) of
+ false ->
+ case catch open_port({spawn,"asn1_erl_drv"},[]) of
+ {'EXIT',Reason} ->
+ From ! {port_error,Reason};
+ Port ->
+ register(drv_complete,Port),
+ From ! driver_ready,
+ Port
+ end;
+ _ ->
+ From ! driver_ready,
+ ok
+ end.
+
+is_port_open(Name) ->
+ case whereis(Name) of
+ Port when port(Port) ->
+ true;
+ _ -> false
+ end.
+
+register_and_loop(Port) when port(Port) ->
+ register(asn1_driver_owner,self()),
+ loop();
+register_and_loop(_) ->
+ ok.
+
+loop() ->
+ receive
+ unload ->
+ case whereis(drv_complete) of
+ Port when port(Port) ->
+ port_close(Port);
+ _ -> ok
+ end,
+ erl_ddll:unload_driver("asn1_erl_drv"),
+ ok;
+ _ ->
+ loop()
+ end.
+
+unload_driver() ->
+ case whereis(asn1_driver_owner) of
+ Pid when pid(Pid) ->
+ Pid ! unload,
+ ok;
+ _ ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per.erl
new file mode 100644
index 0000000000..d531a165ae
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per.erl
@@ -0,0 +1,1609 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/2, fixextensions/2, setoptionals/1,
+ getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/3, set_choice/3, encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_boolean/1,
+ decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1]).
+-export([encode_enumerated/3, decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_restricted_string/4, encode_restricted_string/5,
+ decode_restricted_string/4, decode_restricted_string/5,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([encode_UniversalString/2, decode_UniversalString/2,
+ encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ encode_VisibleString/2, decode_VisibleString/2,
+ encode_BMPString/2, decode_BMPString/2,
+ encode_IA5String/2, decode_IA5String/2,
+ encode_NumericString/2, decode_NumericString/2
+ ]).
+
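+%% The encode functions here return a (possibly nested) list of primitives
+%% such as {bit,B}, {bits,N,Val}, {octet,V}, {octets,List}, align and
+%% {debug,_}; complete/1 flattens such a list into a list of bytes. The
+%% decode functions work on a buffer that is either a list of bytes or a
+%% tuple {BitPos,Bytes}.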
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+ [{debug,choiceext},{bit,0}];
+setchoiceext(false) ->
+ [{debug,choiceext},{bit,1}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(true) ->
+ [{debug,ext},{bit,1}];
+setext(false) ->
+ [{debug,ext},{bit,0}].
+
+fixoptionals(OptList,Val) when tuple(Val) ->
+ fixoptionals(OptList,Val,[]);
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([],Val,Acc) ->
+ % return {Val,Opt}
+ {Val,lists:reverse(Acc)};
+fixoptionals([{_,Pos}|Ot],Val,Acc) ->
+ case element(Pos+1,Val) of
+ asn1_NOVALUE -> fixoptionals(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,[0|Acc]);
+ _ -> fixoptionals(Ot,Val,[1|Acc])
+ end.
+
+
+%setoptionals(OptList,Val) ->
+% Vlist = tuple_to_list(Val),
+% setoptionals(OptList,Vlist,1,[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,Acc1,Acc2) ->
+ % return {Val,Opt}
+ {list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
+
+setoptionals([H|T]) ->
+ [{bit,H}|setoptionals(T)];
+setoptionals([]) ->
+ [{debug,optionals}].
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+ [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_NumChoices,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_integer(Bytes,[{'ValueRange',{0,NumChoices-1}}]).
+
+getoptionals(Bytes,L,NumComp) when list(L) ->
+ {Blist,Bytes1} = getbits_as_list(length(L),Bytes),
+ {list_to_tuple(comptuple(Blist,L,NumComp,1)),Bytes1}.
+
+comptuple([Bh|Bt],[{_Name,Nr}|T],NumComp,Nr) ->
+ [Bh|comptuple(Bt,T,NumComp-1,Nr+1)];
+comptuple(Bl,[{Name,Tnr}|Tl],NumComp,Nr) ->
+ [0|comptuple(Bl,[{Name,Tnr}|Tl],NumComp-1,Nr+1)];
+comptuple(_B,_L,0,_Nr) ->
+ [];
+comptuple(B,O,N,Nr) ->
+ [0|comptuple(B,O,N-1,Nr+1)].
+
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+getbits_as_list(0,Bytes,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getbits_as_list(Num,Bytes,Acc) ->
+ {Bit,NewBytes} = getbit(Bytes),
+ getbits_as_list(Num-1,NewBytes,[Bit|Acc]).
+
+getbit(Bytes) ->
+% io:format("getbit:~p~n",[Bytes]),
+ getbit1(Bytes).
+
+getbit1({7,[H|T]}) ->
+ {H band 1,{0,T}};
+getbit1({Pos,[H|T]}) ->
+ {(H bsr (7-Pos)) band 1,{(Pos+1) rem 8,[H|T]}};
+getbit1(Bytes) when list(Bytes) ->
+ getbit1({0,Bytes}).
+
+%% This could be optimized
+getbits(Buffer,Num) ->
+% io:format("getbits:Buffer = ~p~nNum=~p~n",[Buffer,Num]),
+ getbits(Buffer,Num,0).
+
+getbits(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getbits(Buffer,Num,Acc) ->
+ {B,NewBuffer} = getbit(Buffer),
+ getbits(NewBuffer,Num-1,B + (Acc bsl 1)).
+
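+%% For illustration: getbits({0,[2#10100000]}, 3) returns {5,{3,[160]}} -
+%% the three most significant bits of the first byte, read as an integer.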
+
+getoctet(Bytes) when list(Bytes) ->
+ getoctet({0,Bytes});
+getoctet(Bytes) ->
+% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+ getoctet1(Bytes).
+
+getoctet1({0,[H|T]}) ->
+ {H,{0,T}};
+getoctet1({_Pos,[_,H|T]}) ->
+ {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,[_H|T]}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+getoctets(Buffer,Num) ->
+% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+ getoctets(Buffer,Num,0).
+
+getoctets(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getoctets(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+getoctets_as_list(Buffer,Num) ->
+ getoctets_as_list(Buffer,Num,[]).
+
+getoctets_as_list(Buffer,0,Acc) ->
+ {lists:reverse(Acc),Buffer};
+getoctets_as_list(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple the first list is the Rootset and the
+%% second is the Extensions and then Altnum must also be a tuple with the
+%% lengths of the 2 lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+ [{bit,0}, % the value is in the root set
+ encode_integer([{'ValueRange',{0,Len1-1}}],N)];
+ N when integer(N) ->
+ [{bit,0}]; % no encoding if only 0 or 1 alternative
+ false ->
+ [{bit,1}, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_integer([{'ValueRange',{0,Len-1}}],N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_,[],_) ->
+ false.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_Constraint, Val) when list(Val) ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}];
+encode_open_type(_Constraint, Val) when binary(Val) ->
+ [encode_length(undefined,size(Val)),align,
+ {octets,binary_to_list(Val)}].
+%% the binary_to_list is not optimal but compatible with the current solution
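+%% For illustration: encode_open_type([], [1,2,3]) gives
+%% [{octet,3},align,{octets,[1,2,3]}] - a length determinant, alignment to
+%% an octet boundary, and the encapsulated octets themselves.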
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _Constraint) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_NamedNumberList) when integer(V) ->
+ encode_integer(C,V).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer({Rc,_Ec},Val) ->
+ case (catch encode_integer(Rc,Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+ [{bit,1},encode_unconstrained_number(Val)];
+ Encoded ->
+ [{bit,0},Encoded]
+ end;
+encode_integer(C,Val ) when list(C) ->
+ case get_constraint(C,'SingleValue') of
+ no ->
+ encode_integer1(C,Val);
+ V when integer(V),V == Val ->
+ []; % a type restricted to a single value encodes to nothing
+ V when list(V) ->
+ case lists:member(Val,V) of
+ true ->
+ encode_integer1(C,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end.
+
+encode_integer1(C, Val) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ encode_unconstrained_number(Val);
+ {Lb,'MAX'} ->
+ encode_semi_constrained_number(Lb,Val);
+ %% positive with range
+ {Lb,Ub} when Val >= Lb,
+ Ub >= Val ->
+ encode_constrained_number(VR,Val)
+ end.
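+%% For illustration: encode_integer([{'ValueRange',{0,7}}], 5) gives
+%% {bits,3,5} - a range of 8 values fits in 3 bits and the value is encoded
+%% as its offset from the lower bound.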
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,{Rc,_Ec}) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,Rc);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ V when list(V) ->
+ {Val,Buffer2} = decode_integer1(Buffer,C),
+ case lists:member(Val,V) of
+ true ->
+ {Val,Buffer2};
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_,_} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+% X.691:10.6 Encoding of a normally small non-negative whole number
+% Use this for encoding of CHOICE index if there is an extension marker in
+% the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+ [{bit,0},{bits,6,Val}];
+encode_small_number(Val) ->
+ [{bit,1},encode_semi_constrained_number(0,Val)].
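+%% For illustration: encode_small_number(5) gives [{bit,0},{bits,6,5}] -
+%% values up to 63 are sent as a single 0 bit followed by six bits.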
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,{0,'MAX'})
+ end.
+
+% X.691:10.7 Encoding of a semi-constrained whole number
+%% A special-cased clause encode_semi_constrained_number(0,Val) might be an optimization.
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Octs = eint_positive(Val2),
+ [encode_length(undefined,length(Octs)),{octets,Octs}].
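+%% For illustration: encode_semi_constrained_number(0, 300) gives
+%% [{octet,2},{octets,[1,44]}] - a length of 2 octets followed by
+%% 300 = 1*256 + 44 in big-endian octets.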
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+ {bits,1,Val2};
+ Range =< 4 ->
+ {bits,2,Val2};
+ Range =< 8 ->
+ {bits,3,Val2};
+ Range =< 16 ->
+ {bits,4,Val2};
+ Range =< 32 ->
+ {bits,5,Val2};
+ Range =< 64 ->
+ {bits,6,Val2};
+ Range =< 128 ->
+ {bits,7,Val2};
+ Range =< 255 ->
+ {bits,8,Val2};
+ Range =< 256 ->
+ {octets,1,Val2};
+ Range =< 65536 ->
+ {octets,2,Val2};
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,3},length(Octs)),{octets,Octs}];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,4},length(Octs)),{octets,Octs}];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,5},length(Octs)),{octets,Octs}];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end.
+
+decode_constrained_number(Buffer,{Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+% Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}];
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}].
+
+%% used for positive Values which don't need a sign bit
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
+
+%% used for signed positive values
+
+%eint(Val, Ack) ->
+% X = Val band 255,
+% Next = Val bsr 8,
+% if
+% Next == 0, X >= 127 ->
+% [0,X|Ack];
+% Next == 0 ->
+% [X|Ack];
+% true ->
+% eint(Next,[X|Ack])
+% end.
+
+%%% used for signed negative values
+%enint(Val, Acc) ->
+% NumOctets = if
+% -Val < 16#80 -> 1;
+% -Val < 16#8000 ->2;
+% -Val < 16#800000 ->3;
+% -Val < 16#80000000 ->4;
+% -Val < 16#8000000000 ->5;
+% -Val < 16#800000000000 ->6;
+% -Val < 16#80000000000000 ->7;
+% -Val < 16#8000000000000000 ->8;
+% -Val < 16#800000000000000000 ->9
+% end,
+% enint(Val,Acc,NumOctets).
+
+%enint(Val, Acc,0) ->
+% Acc;
+%enint(Val, Acc,NumOctets) ->
+% enint(Val bsr 8,[Val band 255|Acc],NumOctets-1).
+
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(Val,Acc) when Val > 0 ->
+ minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+minimum_octets(0,Acc) ->
+ Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+ {octet,Len band 16#7F};
+ Len < 16384 ->
+ {octets,2,2#1000000000000000 bor Len};
+ true ->
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length({Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number({Lb,Ub},Len);
+encode_length(SingleValue,_Len) when integer(SingleValue) ->
+ [].
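+%% For illustration: encode_length(undefined, 5) gives {octet,5}, while
+%% encode_length(undefined, 300) gives {octets,2,2#1000000000000000 bor 300}
+%% (two octets with the top bit of the first octet set).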
+
+encode_small_length(Len) when Len =< 64 ->
+ [{bit,0},{bits,6,Len-1}];
+encode_small_length(Len) ->
+ [{bit,1},encode_length(undefined,Len)].
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ Buffer2 = align(Buffer),
+ {Bits,_} = getbits(Buffer2,2),
+ case Bits of
+ 2 ->
+ {Val,Bytes3} = getoctets(Buffer2,2),
+ {(Val band 16#3FFF),Bytes3};
+ 3 ->
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+ _ ->
+ {Val,Bytes3} = getoctet(Buffer2),
+ {Val band 16#7F,Bytes3}
+ end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+ % X.691:10.9.3.5
+decode_length(Buffer,{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+ case getbit(Buffer) of
+ {0,Remain} ->
+ getbits(Remain,7);
+ {1,_Remain} ->
+ {Val,Remain2} = getoctets(Buffer,2),
+ {Val band 2#0111111111111111, Remain2}
+ end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
+
+
+% X.691:11
+encode_boolean({Name,Val}) when atom(Name) ->
+ encode_boolean(Val);
+encode_boolean(true) ->
+ {bit,1};
+encode_boolean(false) ->
+ {bit,0};
+encode_boolean(Val) ->
+ exit({error,{asn1,{encode_boolean,Val}}}).
+
+
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:12
+%% ENUMERATED
+%%
+%% encode_enumerated(C,Value,NamedNumberTup) -> CompleteList
+%%
+%%
+
+encode_enumerated(C,{Name,Value},NamedNumberList) when
+ atom(Name),list(NamedNumberList) ->
+ encode_enumerated(C,Value,NamedNumberList);
+
+%% ENUMERATED with extension mark
+encode_enumerated(_C,{asn1_enum,Value},{_Nlist1,Nlist2}) when Value >= length(Nlist2) ->
+ [{bit,1},encode_small_number(Value)];
+encode_enumerated(C,Value,{Nlist1,Nlist2}) ->
+ case enum_search(Value,Nlist1,0) of
+ NewV when integer(NewV) ->
+ [{bit,0},encode_integer(C,NewV)];
+ false ->
+ case enum_search(Value,Nlist2,0) of
+ ExtV when integer(ExtV) ->
+ [{bit,1},encode_small_number(ExtV)];
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end
+ end;
+
+encode_enumerated(C,Value,NamedNumberList) when list(NamedNumberList) ->
+ case enum_search(Value,NamedNumberList,0) of
+ NewV when integer(NewV) ->
+ encode_integer(C,NewV);
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end.
+
+%% returns the ordinal number (0, 1, ...) at which Name occurs in the list,
+%% or false if Name is not found
+%%
+enum_search(Name,[Name|_NamedNumberList],Acc) ->
+ Acc;
+enum_search(Name,[_H|T],Acc) ->
+ enum_search(Name,T,Acc+1);
+enum_search(_,[],_) ->
+ false. % name not found !error
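+%% For illustration: encode_enumerated([{'ValueRange',{0,2}}], b, [a,b,c])
+%% gives {bits,2,1}, since b is the second (index 1) of three alternatives.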
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+	1 -> % this is an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be one of:
+%% - [identifiers] where only the named identifiers are set to one;
+%%   the Constraint must then carry some information about the
+%%   bit length.
+%% - [list of ones and zeroes] giving all the bits explicitly
+%% - an integer value representing the bit list
+%% C is the constraint Len, only valid when identifiers are used
+
+%% when the value is a list of named bits
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+encode_bit_string(C, [{bit,No} | RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes([{bit,No} | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+
+encode_bit_string(C, BitListValue, _NamedBitList) when list(BitListValue) ->
+ %% first remove any trailing zeroes
+ Bl1 = lists:dropwhile(fun(0)->true;(1)->false end,lists:reverse(BitListValue)),
+ BitList = [{bit,X} || X <- lists:reverse(Bl1)],
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ []; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ pad_list(V,BitList);
+ V when integer(V) -> % fixed length more than 16 bits
+ [align,pad_list(V,BitList)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ [encode_length({Lb,Ub},length(BitList)),align,BitList];
+ no ->
+ [encode_length(undefined,length(BitList)),align,BitList]
+ end;
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) ->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList).
+
+
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+	    {[],Buffer}; % nothing to decode
+	V when integer(V),V=<16 -> % fixed length 16 bits or less
+	    bit_list_to_named(Buffer,V,NamedNumberList);
+	V when integer(V) -> % fixed length, more than 16 bits
+ Bytes2 = align(Buffer),
+ bit_list_to_named(Bytes2,V,NamedNumberList);
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList)
+ end.
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_to_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_to_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_to_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_to_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_to_named1(Pos+1,Bt,Names,Acc);
+bit_list_to_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_to_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_to_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_to_named1(_Pos,[],_Names,Acc) ->
+ lists:reverse(Acc).
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1])->
+%% returns list with all in SetPos set.
+%% in positioning in list the first element is 0, the second 1 etc.., but
+%%
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _) ->
+ [].
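+%% For illustration: make_and_set_list([1,3], 0) gives [0,1,0,1] - a one at
+%% each listed position, counting positions from 0.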
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns a padded (with trailing {bit,0} elements) list of length N;
+%% if BitList contains more than N significant bits an exit with an
+%% asn1 range_error is generated
+
+pad_list(0,BitList) ->
+ case BitList of
+ [] -> [];
+ _ -> exit({error,{asn1,{range_error,{bit_string,BitList}}}})
+ end;
+pad_list(N,[Bh|Bt]) ->
+ [Bh|pad_list(N-1,Bt)];
+pad_list(N,[]) ->
+ [{bit,0},pad_list(N-1,[])].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,{Name,Val}) when atom(Name) ->
+ encode_octet_string(C,false,Val);
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(_C,true,_Val) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(C,false,Val) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ 1 ->
+ [V] = Val,
+ {bits,8,V};
+ 2 ->
+ [V1,V2] = Val,
+ [{bits,8,V1},{bits,8,V2}];
+ Sv when Sv =<65535, Sv == length(Val) -> % fixed length
+ [align,{octets,Val}];
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),align,
+ {octets,Val}];
+ Sv when list(Sv) ->
+ [encode_length({hd(Sv),lists:max(Sv)},length(Val)),align,
+ {octets,Val}];
+ no ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}]
+ end.
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,C,false) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ {[],Bytes};
+ 1 ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+ 2 ->
+ {B1,Bytes2}= getbits(Bytes,8),
+ {B2,Bytes3}= getbits(Bytes2,8),
+ {[B1,B2],Bytes3};
+ {_,0} ->
+ {[],Bytes};
+ Sv when integer(Sv), Sv =<65535 -> % fixed length
+ Bytes2 = align(Bytes),
+ getoctets_as_list(Bytes2,Sv);
+ {Lb,Ub} ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ Sv when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ no ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+encode_restricted_string(aligned,StringType,C,Val) ->
+    encode_restricted_string(aligned,StringType,C,false,Val).
+
+
+encode_restricted_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,StringType,C,false,Val);
+encode_restricted_string(aligned,StringType,C,_Ext,Val) ->
+ Result = chars_encode(C,StringType,Val),
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} ->
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ 0 ->
+ [];
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ [align,Result];
+ {Ub,Lb} ->
+ [encode_length({Ub,Lb},length(Val)),align,Result];
+ Vl when list(Vl) ->
+ [encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
+ no ->
+ [encode_length(undefined,length(Val)),align,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned,StringType,C) ->
+ decode_restricted_string(Bytes,aligned,StringType,C,false).
+
+decode_restricted_string(Bytes,aligned,StringType,C,_Ext) ->
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,C,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,C,Ub);
+ 0 ->
+ {[],Bytes};
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len)
+ end.
+
+
+
+encode_BMPString(C,Val) ->
+ encode_restricted_string(aligned,'BMPString',C,false,Val).
+decode_BMPString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'BMPString',C,false).
+
+encode_GeneralString(C,Val) ->
+ encode_restricted_string(aligned,'GeneralString',C,false,Val).
+decode_GeneralString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'GeneralString',C,false).
+
+encode_GraphicString(C,Val) ->
+ encode_restricted_string(aligned,'GraphicString',C,false,Val).
+decode_GraphicString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'GraphicString',C,false).
+
+encode_IA5String(C,Val) ->
+ encode_restricted_string(aligned,'IA5String',C,false,Val).
+decode_IA5String(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'IA5String',C,false).
+
+encode_NumericString(C,Val) ->
+ encode_restricted_string(aligned,'NumericString',C,false,Val).
+decode_NumericString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'NumericString',C,false).
+
+encode_PrintableString(C,Val) ->
+ encode_restricted_string(aligned,'PrintableString',C,false,Val).
+decode_PrintableString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'PrintableString',C,false).
+
+encode_TeletexString(C,Val) -> % equivalent with T61String
+ encode_restricted_string(aligned,'TeletexString',C,false,Val).
+decode_TeletexString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'TeletexString',C,false).
+
+encode_UniversalString(C,Val) ->
+ encode_restricted_string(aligned,'UniversalString',C,false,Val).
+decode_UniversalString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'UniversalString',C,false).
+
+encode_VideotexString(C,Val) ->
+ encode_restricted_string(aligned,'VideotexString',C,false,Val).
+decode_VideotexString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'VideotexString',C,false).
+
+encode_VisibleString(C,Val) -> % equivalent with ISO646String
+ encode_restricted_string(aligned,'VisibleString',C,false,Val).
+decode_VisibleString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'VisibleString',C,false).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{O1,O2}|Acc])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the PermittedAlphabet
+%% constraint into account.
+%% This function only encodes the value part, NOT the length.
+
+chars_encode(C,StringType,Value) ->
+ case {StringType,get_constraint(C,'PermittedAlphabet')} of
+ {'UniversalString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+ _ ->
+ {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+ chars_encode2(Value,NumBits,CharOutTab)
+ end.
+
+chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,element(H-Min+1,Tab)}|chars_encode2(T,NumBits,{Min,Max,Tab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+ [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+ [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_NumBits,{_Min,_Max,_Tab}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'GeneralString' ->
+		exit({error,{asn1,{'not implemented','GeneralString'}}});
+	    'GraphicString' ->
+		exit({error,{asn1,{'not implemented','GraphicString'}}});
+	    'TeletexString' ->
+		exit({error,{asn1,{'not implemented','TeletexString'}}});
+	    'VideotexString' ->
+		exit({error,{asn1,{'not implemented','VideotexString'}}});
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B > 2, B =< 4 -> 4;
+ B when B > 4, B =< 8 -> 8;
+ B when B > 8, B =< 16 -> 16;
+ B when B > 16, B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
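+%% For illustration: charbits(95,aligned) gives 8 (95 characters need 7 bits,
+%% rounded up to a whole octet), and charbits(11,aligned) gives 4.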
+
+
+chars_decode(Bytes,_,'BMPString',C,Len) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ no ->
+ getBMPChars(Bytes,Len);
+ _ ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet constraint"}}})
+ end;
+chars_decode(Bytes,NumBits,StringType,C,Len) ->
+ CharInTab = get_CharInTab(C,StringType),
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result = case minimum_octets(Char+Min) of
+ [NewChar] -> NewChar;
+ [C1,C2] -> {0,0,C1,C2};
+ [C1,C2,C3] -> {0,C1,C2,C3};
+ [C1,C2,C3,C4] -> {C1,C2,C3,C4}
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet is currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val);
+encode_null(_) -> []. % encodes to nothing
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
+encode_object_identifier(Val) ->
+ Octets = e_object_identifier(Val,notag),
+ [{debug,object_identifier},encode_length(undefined,length(Octets)),{octets,Octets}].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V},DoTag) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),list(V) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier(V,DoTag) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+
+% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail],_DoTag) when E1 =< 2 ->
+ Head = 40*E1 + E2, % weird
+ Res = e_object_elements([Head|Tail]),
+% dotag(DoTag,[6],elength(length(Res)+1),[Head|Res]),
+ Res.
+
+e_object_elements([]) ->
+ [];
+e_object_elements([H|T]) ->
+ lists:append(e_object_element(H),e_object_elements(T)).
+
+e_object_element(Num) when Num < 128 ->
+ [Num];
+% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
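+%% For illustration: encode_object_identifier({2,5,4,3}) gives
+%% [{debug,object_identifier},{octet,3},{octets,[85,4,3]}] - the first two
+%% arcs are folded into 40*2+5 = 85 and each remaining arc below 128 becomes
+%% a single octet.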
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step when encoding a complete ASN.1 type
+%%
+complete(InList) when list(InList) ->
+ complete(InList,[],0);
+complete(InList) ->
+ complete([InList],[],0).
+
+complete([{debug,_}|T], Acc, Acclen) ->
+ complete(T,Acc,Acclen);
+complete([H|T],Acc,Acclen) when list(H) ->
+ complete(lists:concat([H,T]),Acc,Acclen);
+
+
+complete([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+ Newval = case N of
+ 1 ->
+ Val4 = Val band 16#FF,
+ [Val4];
+ 2 ->
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val3,Val4];
+ 3 ->
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val2,Val3,Val4];
+ 4 ->
+ Val1 = (Val bsr 24) band 16#FF,
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val1,Val2,Val3,Val4]
+ end,
+ complete([{octets,Newval}|T],Acc,Acclen);
+
+complete([{octets,Oct}|T],[],_Acclen) when list(Oct) ->
+ complete(T,lists:reverse(Oct),0);
+complete([{octets,Oct}|T],[Hacc|Tacc],Acclen) when list(Oct) ->
+ Rest = 8 - Acclen,
+ if
+ Rest == 8 ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc|Tacc]]),0);
+ true ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc bsl Rest|Tacc]]),0)
+ end;
+
+complete([{bit,Val}|T], Acc, Acclen) ->
+ complete([{bits,1,Val}|T],Acc,Acclen);
+complete([{octet,Val}|T], Acc, Acclen) ->
+ complete([{octets,1,Val}|T],Acc,Acclen);
+
+complete([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
+ complete(T,[Val|Acc],N);
+complete([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
+ Rest = 8 - Acclen,
+ if
+ Rest >= N ->
+ complete(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
+ true ->
+ Diff = N - Rest,
+ NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
+ Mask = element(Diff,{1,3,7,15,31,63,127,255}),
+ complete(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
+ end;
+complete([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
+ complete([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
+
+complete([align|T],Acc,0) ->
+ complete(T,Acc,0);
+complete([align|T],[Hacc|Tacc],Acclen) ->
+ Rest = 8 - Acclen,
+ complete(T,[Hacc bsl Rest|Tacc],0);
+complete([{octets,_N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
+ complete([{octets,Val}|T],Acc,Acclen);
+complete([],Acc,0) ->
+ lists:reverse(Acc);
+complete([],[Hacc|Tacc],Acclen) when Acclen > 0->
+ Rest = 8 - Acclen,
+ NewHacc = Hacc bsl Rest,
+ lists:reverse([NewHacc|Tacc]).
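+%% For illustration, two worked calls of complete/1 (hypothetical inputs):
+%% bit fields are packed MSB-first, and a trailing partial octet is
+%% zero-filled in its low bits.
+%%
+%%   complete([{bits,3,5},{bits,5,1}]) -> [2#10100001] = [161]
+%%   complete([{bits,3,5}])            -> [2#10100000] = [160]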
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin.erl
new file mode 100644
index 0000000000..08a78165a2
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin.erl
@@ -0,0 +1,2182 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per_bin.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per_bin).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/2, fixoptionals/3,
+ fixextensions/2,
+ getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/2, getoptionals2/2, set_choice/3, encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_small_number/1, encode_boolean/1,
+ decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1,
+ decode_compact_bit_string/3]).
+-export([decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([encode_UniversalString/2, decode_UniversalString/2,
+ encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ encode_VisibleString/2, decode_VisibleString/2,
+ encode_BMPString/2, decode_BMPString/2,
+ encode_IA5String/2, decode_IA5String/2,
+ encode_NumericString/2, decode_NumericString/2,
+ encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
+ ]).
+-export([complete_bytes/1]).
+
+-define('16K',16384).
+-define('32K',32768).
+-define('64K',65536).
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%% converts a list to a record if necessary
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple;
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]).
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+ [{debug,choiceext},{bits,1,0}];
+setchoiceext(false) ->
+ [{debug,choiceext},{bits,1,1}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(false) ->
+ [{debug,ext},{bits,1,0}];
+setext(true) ->
+ [{debug,ext},{bits,1,1}].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This version of fixoptionals/2 is left only for backward
+%% compatibility with older generated code
+
+fixoptionals(OptList,Val) when tuple(Val) ->
+ fixoptionals1(OptList,Val,[]);
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals1(OptList,Val,1,[],[]).
+
+fixoptionals1([],Val,Acc) ->
+ %% return {Val,Opt}
+ {Val,lists:reverse(Acc)};
+fixoptionals1([{_,Pos}|Ot],Val,Acc) ->
+ case element(Pos+1,Val) of
+ asn1_NOVALUE -> fixoptionals1(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals1(Ot,Val,[0|Acc]);
+ _ -> fixoptionals1(Ot,Val,[1|Acc])
+ end.
+
+
+fixoptionals1([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals1(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals1([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals1(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals1(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals1(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals1([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals1([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals1([],[],_,Acc1,Acc2) ->
+ % return {Val,Opt}
+ {list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This is the new fixoptionals/3 which is used by newly generated code
+%%
+fixoptionals(OptList,OptLength,Val) when tuple(Val) ->
+ Bits = fixoptionals(OptList,Val,0),
+ {Val,{bits,OptLength,Bits}};
+
+fixoptionals([],_Val,Acc) ->
+ %% Optbits
+ Acc;
+fixoptionals([Pos|Ot],Val,Acc) ->
+ case element(Pos,Val) of
+ asn1_NOVALUE -> fixoptionals(Ot,Val,Acc bsl 1);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,Acc bsl 1);
+ _ -> fixoptionals(Ot,Val,(Acc bsl 1) + 1)
+ end.
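+%% For illustration, a worked call of fixoptionals/3 ('myrec' and its field
+%% values are hypothetical). One bit is produced per optional field, MSB
+%% first: 0 for asn1_NOVALUE/asn1_DEFAULT, 1 for a present value.
+%%
+%%   fixoptionals([2,3],2,{myrec,asn1_NOVALUE,17}) ->
+%%       {{myrec,asn1_NOVALUE,17},{bits,2,2#01}}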
+
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when binary(Bytes) ->
+ getbit({0,Bytes});
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+ [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_constrained_number(Bytes,{0,NumChoices-1}).
+
+%% old version kept for backward compatibility with code generated by R7B
+getoptionals(Bytes,NumOpt) ->
+ {Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
+ {list_to_tuple(Blist),Bytes1}.
+
+%% new version used in code generated by r8b_patch/3 and later
+getoptionals2(Bytes,NumOpt) ->
+ getbits(Bytes,NumOpt).
+
+
+%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
+%% Num = integer(),
+%% Bytes = list() | tuple(),
+%% Unused = integer(),
+%% BinBits = binary(),
+%% RestBytes = tuple()
+getbits_as_binary(Num,Bytes) when binary(Bytes) ->
+ getbits_as_binary(Num,{0,Bytes});
+getbits_as_binary(0,Buffer) ->
+ {{0,<<>>},Buffer};
+getbits_as_binary(Num,{0,Bin}) when Num > 16 ->
+ Used = Num rem 8,
+ Pad = (8 - Used) rem 8,
+% Nbytes = Num div 8,
+ <<Bits:Num,_:Pad,RestBin/binary>> = Bin,
+ {{Pad,<<Bits:Num,0:Pad>>},RestBin};
+getbits_as_binary(Num,Buffer={_Used,_Bin}) -> % Unaligned buffer
+ %% Num =< 16,
+ {Bits2,Buffer2} = getbits(Buffer,Num),
+ Pad = (8 - (Num rem 8)) rem 8,
+ {{Pad,<<Bits2:Num,0:Pad>>},Buffer2}.
+
+
+% integer_from_list(Int,[],BigInt) ->
+% BigInt;
+% integer_from_list(Int,[H|T],BigInt) when Int < 8 ->
+% (BigInt bsl Int) bor (H bsr (8-Int));
+% integer_from_list(Int,[H|T],BigInt) ->
+% integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
+
+getbits_as_list(Num,Bytes) when binary(Bytes) ->
+ getbits_as_list(Num,{0,Bytes},[]);
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+%% Nothing more is to be picked; return the accumulated bits.
+getbits_as_list(0, B, Acc) ->
+ {lists:reverse(Acc),B};
+%% If the first byte in the buffer is untouched (Used =:= 0) and at least
+%% one whole byte is to be picked, then pick one byte.
+getbits_as_list(N,{0,Bin},Acc) when N >= 8 ->
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>> = Bin,
+ getbits_as_list(N-8,{0,Rest},[B0,B1,B2,B3,B4,B5,B6,B7|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 4, Used =< 4 ->
+ NewUsed = Used + 4,
+ Rem = 8 - NewUsed,
+ <<_:Used,B3:1,B2:1,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-4,{NewUsed rem 8,NewRest},[B0,B1,B2,B3|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 2, Used =< 6 ->
+ NewUsed = Used + 2,
+ Rem = 8 - NewUsed,
+ <<_:Used,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-2,{NewUsed rem 8,NewRest},[B0,B1|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when Used =< 7 ->
+ NewUsed = Used + 1,
+ Rem = 8 - NewUsed,
+ <<_:Used,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-1,{NewUsed rem 8,NewRest},[B0|Acc]).
+
+
+getbit({7,<<_:7,B:1,Rest/binary>>}) ->
+ {B,{0,Rest}};
+getbit({0,Buffer = <<B:1,_:7,_/binary>>}) ->
+ {B,{1,Buffer}};
+getbit({Used,Buffer}) ->
+ Unused = (8 - Used) - 1,
+ <<_:Used,B:1,_:Unused,_/binary>> = Buffer,
+ {B,{Used+1,Buffer}};
+getbit(Buffer) when binary(Buffer) ->
+ getbit({0,Buffer}).
+
+
+getbits({0,Buffer},Num) when (Num rem 8) == 0 ->
+ <<Bits:Num,Rest/binary>> = Buffer,
+ {Bits,{0,Rest}};
+getbits({Used,Bin},Num) ->
+ NumPlusUsed = Num + Used,
+ NewUsed = NumPlusUsed rem 8,
+ Unused = (8-NewUsed) rem 8,
+ case Unused of
+ 0 ->
+ <<_:Used,Bits:Num,Rest/binary>> = Bin,
+ {Bits,{0,Rest}};
+ _ ->
+ Bytes = NumPlusUsed div 8,
+ <<_:Used,Bits:Num,_UBits:Unused,_/binary>> = Bin,
+ <<_:Bytes/binary,Rest/binary>> = Bin,
+ {Bits,{NewUsed,Rest}}
+ end;
+getbits(Bin,Num) when binary(Bin) ->
+ getbits({0,Bin},Num).
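+%% For illustration, two worked calls of getbits/2 (hypothetical buffers).
+%% The buffer is a {Used,Binary} pair where Used counts consumed bits in the
+%% first byte; an unaligned read keeps the binary and advances Used.
+%%
+%%   getbits({0,<<2#10110001>>},3) -> {2#101,{3,<<2#10110001>>}}
+%%   getbits({3,<<2#10110001>>},5) -> {2#10001,{0,<<>>}}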
+
+
+
+% getoctet(Bytes) when list(Bytes) ->
+% getoctet({0,Bytes});
+% getoctet(Bytes) ->
+% %% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+% getoctet1(Bytes).
+
+% getoctet1({0,[H|T]}) ->
+% {H,{0,T}};
+% getoctet1({Pos,[_,H|T]}) ->
+% {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,<<_H,T/binary>>}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as an integer with bit significance as in buffer.
+getoctets({0,Buffer},Num) ->
+ <<Val:Num/integer-unit:8,RestBin/binary>> = Buffer,
+ {Val,{0,RestBin}};
+getoctets({U,<<_Padding,Rest/binary>>},Num) when U /= 0 ->
+ getoctets({0,Rest},Num);
+getoctets(Buffer,Num) when binary(Buffer) ->
+ getoctets({0,Buffer},Num).
+% getoctets(Buffer,Num) ->
+% %% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+% getoctets(Buffer,Num,0).
+
+% getoctets(Buffer,0,Acc) ->
+% {Acc,Buffer};
+% getoctets(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+% getoctets_as_list(Buffer,Num) ->
+% getoctets_as_list(Buffer,Num,[]).
+
+% getoctets_as_list(Buffer,0,Acc) ->
+% {lists:reverse(Acc),Buffer};
+% getoctets_as_list(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as a binary
+getoctets_as_bin({0,Bin},Num)->
+ <<Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin({_U,Bin},Num) ->
+ <<_Padding,Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin(Bin,Num) when binary(Bin) ->
+ getoctets_as_bin({0,Bin},Num).
+
+%% same as above but returns octets as a List
+getoctets_as_list(Buffer,Num) ->
+ {Bin,Buffer2} = getoctets_as_bin(Buffer,Num),
+ {binary_to_list(Bin),Buffer2}.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple the first list is the Rootset and the
+%% second is the Extensions and then Altnum must also be a tuple with the
+%% lengths of the 2 lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+ [{bits,1,0}, % the value is in the root set
+ encode_integer([{'ValueRange',{0,Len1-1}}],N)];
+ N when integer(N) ->
+ [{bits,1,0}]; % no encoding if only 0 or 1 alternative
+ false ->
+ [{bits,1,1}, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_integer([{'ValueRange',{0,Len-1}}],N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_Alt,[],_Tag) ->
+ false.
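+%% For illustration, a worked call of set_choice/3 (hypothetical alternatives
+%% a..d, no extension list): the chosen index is encoded as a constrained
+%% integer over 0..Len-1.
+%%
+%%   set_choice(b,[a,b,c,d],4) -> {bits,2,1}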
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_fragmented_XXX; decode of values encoded fragmented according
+%% to ITU-T X.691 clause 10.9.3.8. The unit (XXX) is either bits, octets,
+%% characters or number of components (in a choice,sequence or similar).
+%% Buffer is a buffer {Used, Bin}.
+%% C is the constrained length.
+%% If the buffer is not aligned, this function does that.
+decode_fragmented_bits({0,Buffer},C) ->
+ decode_fragmented_bits(Buffer,C,[]);
+decode_fragmented_bits({_N,<<_,Bs/binary>>},C) ->
+ decode_fragmented_bits(Bs,C,[]).
+
+decode_fragmented_bits(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin, Len * ?'16K'),
+ decode_fragmented_bits(Bin2,C,[Value,Acc]);
+decode_fragmented_bits(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ BinBits = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int),C == size(BinBits) ->
+ {BinBits,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ {BinBits,{0,Bin}}
+ end;
+decode_fragmented_bits(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ Result = {BinBits,{Used,_Rest}} =
+ case (Len rem 8) of
+ 0 ->
+ <<Value:Len/binary-unit:1,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Value|Acc])),{0,Bin2}};
+ Rem ->
+ Bytes = Len div 8,
+ U = 8 - Rem,
+ <<Value:Bytes/binary-unit:8,Bits1:Rem,Bits2:U,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Bits1 bsl U,Value|Acc])),
+ {Rem,<<Bits2,Bin2/binary>>}}
+ end,
+ case C of
+ Int when integer(Int),C == (size(BinBits) - ((8 - Used) rem 8)) ->
+ Result;
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ Result
+ end.
+
+
+decode_fragmented_octets({0,Bin},C) ->
+ decode_fragmented_octets(Bin,C,[]);
+decode_fragmented_octets({_N,<<_,Bs/binary>>},C) ->
+ decode_fragmented_octets(Bs,C,[]).
+
+decode_fragmented_octets(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin,Len * ?'16K'),
+ decode_fragmented_octets(Bin2,C,[Value,Acc]);
+decode_fragmented_octets(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ Octets = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int), C == size(Octets) ->
+ {Octets,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,Octets}}});
+ _ ->
+ {Octets,{0,Bin}}
+ end;
+decode_fragmented_octets(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ <<Value:Len/binary-unit:8,Bin2/binary>> = Bin,
+ BinOctets = list_to_binary(lists:reverse([Value|Acc])),
+ case C of
+ Int when integer(Int),size(BinOctets) == Int ->
+ {BinOctets,Bin2};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinOctets}}});
+ _ ->
+ {BinOctets,Bin2}
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_C, Val) when list(Val) ->
+ Bin = list_to_binary(Val),
+ [encode_length(undefined,size(Bin)),{octets,Bin}]; % octets implies align
+encode_open_type(_C, Val) when binary(Val) ->
+ [encode_length(undefined,size(Val)),{octets,Val}]. % octets implies align
+%% the binary_to_list is not optimal but compatible with the current solution
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _C) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_bin(Bytes2,Len).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_NamedNumberList) when integer(V) ->
+ encode_integer(C,V);
+encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
+ encode_integer(C,V,NamedNumberList).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer([{Rc,_Ec}],Val) when tuple(Rc) -> % XXX when is this invoked? First argument most often a list,...Ok this is the extension case...but it doesn't work.
+ case (catch encode_integer([Rc],Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+ [{bits,1,1},encode_unconstrained_number(Val)];
+ Encoded ->
+ [{bits,1,0},Encoded]
+ end;
+encode_integer(C,Val ) when list(C) ->
+ case get_constraint(C,'SingleValue') of
+ no ->
+ encode_integer1(C,Val);
+ V when integer(V),V == Val ->
+ []; % a type restricted to a single value encodes to nothing
+ V when list(V) ->
+ case lists:member(Val,V) of
+ true ->
+ encode_integer1(C,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end.
+
+encode_integer1(C, Val) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ encode_unconstrained_number(Val);
+ {Lb,'MAX'} ->
+ encode_semi_constrained_number(Lb,Val);
+ %% positive with range
+ {Lb,Ub} when Val >= Lb,
+ Ub >= Val ->
+ encode_constrained_number(VR,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,VR,Val}}})
+ end.
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,[{Rc,_Ec}]) when tuple(Rc) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,[Rc]);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ V when list(V) ->
+ {Val,Buffer2} = decode_integer1(Buffer,C),
+ case lists:member(Val,V) of
+ true ->
+ {Val,Buffer2};
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_,_} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+ % X.691:10.6 Encoding of a normally small non-negative whole number
+ % Use this for encoding of CHOICE index if there is an extension marker in
+ % the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+% [{bits,1,0},{bits,6,Val}];
+ [{bits,7,Val}]; % same as above but more efficient
+encode_small_number(Val) ->
+ [{bits,1,1},encode_semi_constrained_number(0,Val)].
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,0)
+ end.
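+%% For illustration, two worked calls of encode_small_number/1 (hypothetical
+%% values): up to 63 fits in one 7-bit field (leading 0 bit); larger values
+%% get a leading 1 bit and a length-prefixed semi-constrained encoding.
+%%
+%%   encode_small_number(5)  -> [{bits,7,5}]
+%%   encode_small_number(70) -> [{bits,1,1},{octets,[1,70]}]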
+
+%% X.691:10.7 Encoding of a semi-constrained whole number
+%% a clause encode_semi_constrained_number(0,Val) might be added as an optimization
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Oct = eint_positive(Val2),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ true ->
+ [encode_length(undefined,Len),{octets,Oct}]
+ end.
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+ {bits,1,Val2};
+ Range =< 4 ->
+ {bits,2,Val2};
+ Range =< 8 ->
+ {bits,3,Val2};
+ Range =< 16 ->
+ {bits,4,Val2};
+ Range =< 32 ->
+ {bits,5,Val2};
+ Range =< 64 ->
+ {bits,6,Val2};
+ Range =< 128 ->
+ {bits,7,Val2};
+ Range =< 255 ->
+ {bits,8,Val2};
+ Range =< 256 ->
+ {octets,[Val2]};
+ Range =< 65536 ->
+ {octets,<<Val2:16>>};
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+ [{bits,2,length(Octs)-1},{octets,Octs}];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ [{bits,2,length(Octs)-1},{octets,Octs}];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ [{bits,3,length(Octs)-1},{octets,Octs}];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end;
+encode_constrained_number(Range,Val) ->
+ exit({error,{asn1,{integer_range,Range,value,Val}}}).
+
+
+decode_constrained_number(Buffer,{Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+ % Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
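+%% For illustration, worked calls of encode_constrained_number/2
+%% (hypothetical ranges): the field width follows Range = Ub - Lb + 1,
+%% switching from bit fields to aligned octets at a range of 256.
+%%
+%%   encode_constrained_number({0,1},1)     -> {bits,1,1}
+%%   encode_constrained_number({0,7},5)     -> {bits,3,5}
+%%   encode_constrained_number({0,254},200) -> {bits,8,200}
+%%   encode_constrained_number({0,255},200) -> {octets,[200]}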
+
+%% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ true ->
+ [encode_length(undefined,Len),{octets,Oct}]
+ end;
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ true ->
+ [encode_length(undefined,Len),{octets,Oct}]
+ end.
+
+
+%% used for positive Values which don't need a sign bit
+%% returns a binary
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
+
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+% minimum_octets(Val) ->
+% minimum_octets(Val,[]).
+
+% minimum_octets(Val,Acc) when Val > 0 ->
+% minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+% minimum_octets(0,Acc) ->
+% Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+ {octets,[Len]};
+ Len < 16384 ->
+ {octets,<<2:2,Len:14>>};
+ true -> % should be able to encode length >= 16384
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length(Vr={Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number(Vr,Len);
+encode_length({Lb,_Ub},Len) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ encode_length(undefined,Len);
+encode_length({Vr={Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0 ->
+ %% constrained extensible
+ [{bits,1,0},encode_constrained_number(Vr,Len)];
+encode_length(SingleValue,_Len) when integer(SingleValue) ->
+ [].
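+%% For illustration, worked calls of encode_length/2 (hypothetical lengths):
+%% an unconstrained length below 128 is one octet, 128..16383 is two octets
+%% with the top bits 10, and a single-value length encodes to nothing.
+%%
+%%   encode_length(undefined,5)   -> {octets,[5]}
+%%   encode_length(undefined,300) -> {octets,<<2:2,300:14>>} % = <<129,44>>
+%%   encode_length(10,5)          -> []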
+
+%% X.691 10.9.3.4 (only used for the length of the bitmap that prefixes
+%% extension additions in a sequence or set)
+encode_small_length(Len) when Len =< 64 ->
+%% [{bits,1,0},{bits,6,Len-1}];
+ {bits,7,Len-1}; % the same as above but more efficient
+encode_small_length(Len) ->
+ [{bits,1,1},encode_length(undefined,Len)].
+
+% decode_small_length({Used,<<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>>}) ->
+% case Buffer of
+% <<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>> ->
+% {Num,
+% case getbit(Buffer) of
+% {0,Remain} ->
+% {Bits,Remain2} = getbits(Remain,6),
+% {Bits+1,Remain2};
+% {1,Remain} ->
+% decode_length(Remain,undefined)
+% end.
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ {0,Buffer2} = align(Buffer),
+ case Buffer2 of
+ <<0:1,Oct:7,Rest/binary>> ->
+ {Oct,{0,Rest}};
+ <<2:2,Val:14,Rest/binary>> ->
+ {Val,{0,Rest}};
+ <<3:2,_:14,_Rest/binary>> ->
+ %% this case should be fixed
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}})
+ end;
+%% {Bits,_} = getbits(Buffer2,2),
+% case Bits of
+% 2 ->
+% {Val,Bytes3} = getoctets(Buffer2,2),
+% {(Val band 16#3FFF),Bytes3};
+% 3 ->
+% exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+% _ ->
+% {Val,Bytes3} = getoctet(Buffer2),
+% {Val band 16#7F,Bytes3}
+% end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+decode_length(_,{Lb,_}) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ exit({error,{asn1,{decode_length,{nyi,above_64K}}}});
+decode_length(Buffer,{{Lb,Ub},[]}) ->
+ case getbit(Buffer) of
+ {0,Buffer2} ->
+ decode_length(Buffer2, {Lb,Ub})
+ end;
+
+
+%When does this case occur with {_,_Lb,Ub} ??
+% X.691:10.9.3.5
+decode_length({Used,Bin},{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub NOTE! this case does not cover case when Ub > 65535
+ Unused = (8-Used) rem 8,
+ case Bin of
+ <<_:Used,0:1,Val:7,R:Unused,Rest/binary>> ->
+ {Val,{Used,<<R,Rest/binary>>}};
+ <<_:Used,_:Unused,2:2,Val:14,Rest/binary>> ->
+ {Val, {0,Rest}};
+ <<_:Used,_:Unused,3:2,_:14,_Rest/binary>> ->
+ exit({error,{asn1,{decode_length,{nyi,length_above_64K}}}})
+ end;
+% decode_length(Buffer,{_,_Lb,Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+% case getbit(Buffer) of
+% {0,Remain} ->
+% getbits(Remain,7);
+% {1,Remain} ->
+% {Val,Remain2} = getoctets(Buffer,2),
+% {Val band 2#0111111111111111, Remain2}
+% end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
+
+
+ % X.691:11
+encode_boolean(true) ->
+ {bits,1,1};
+encode_boolean(false) ->
+ {bits,1,0};
+encode_boolean({Name,Val}) when atom(Name) ->
+ encode_boolean(Val);
+encode_boolean(Val) ->
+ exit({error,{asn1,{encode_boolean,Val}}}).
+
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+ 1 -> % this an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only named identifiers are set to one;
+%% the Constraint must then carry some information about the
+%% bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bitlist
+%% C is the length constraint; it is only relevant when identifiers are used
+
+
+%% when the value is a list of {Unused,BinBits}, where
+%% Unused = integer(),
+%% BinBits = binary().
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
+ binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList);
+
+%% when the value is a list of named bits
+encode_bit_string(C, LoNB=[FirstVal | _RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes(LoNB, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+encode_bit_string(C, BL=[{bit,_No} | _RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes(BL, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+
+% encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+% Bl1 =
+% case NamedBitList of
+% [] -> % dont remove trailing zeroes
+% BitListValue;
+% _ -> % first remove any trailing zeroes
+% lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+% lists:reverse(BitListValue)))
+% end,
+% BitList = [{bit,X} || X <- Bl1],
+% %% BListLen = length(BitList),
+% case get_constraint(C,'SizeConstraint') of
+% 0 -> % fixed length
+% []; % nothing to encode
+% V when integer(V),V=<16 -> % fixed length 16 bits or less
+% pad_list(V,BitList);
+% V when integer(V) -> % fixed length 16 bits or more
+% [align,pad_list(V,BitList)]; % should be another case for V >= 65537
+% {Lb,Ub} when integer(Lb),integer(Ub) ->
+% [encode_length({Lb,Ub},length(BitList)),align,BitList];
+% no ->
+% [encode_length(undefined,length(BitList)),align,BitList];
+% Sc -> % extension marker
+% [encode_length(Sc,length(BitList)),align,BitList]
+% end;
+encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+ BitListToBinary =
+ %% fun that transforms a list of 1 and 0 to a tuple:
+ %% {UnusedBitsInLastByte, Binary}
+ fun([H|T],Acc,N,Fun) ->
+ Fun(T,(Acc bsl 1)+H,N+1,Fun);
+ ([],Acc,N,_) ->
+ Unused = (8 - (N rem 8)) rem 8,
+ {Unused,<<Acc:N,0:Unused>>}
+ end,
+ UnusedAndBin =
+ case NamedBitList of
+ [] -> % don't remove trailing zeroes
+ BitListToBinary(BitListValue,0,0,BitListToBinary);
+ _ ->
+ BitListToBinary(lists:reverse(
+ lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitListValue))),
+ 0,0,BitListToBinary)
+ end,
+ encode_bin_bit_string(C,UnusedAndBin,NamedBitList);
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a tuple
+encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
+ encode_bit_string(C,Val,NamedBitList).
+
+
+%% encode_bin_bit_string/3, when value is a tuple of Unused and BinBits.
+%% Unused = integer(), i.e. the number of unused bits in the least
+%% significant byte of BinBits,
+%% BinBits = binary().
+
+
+encode_bin_bit_string(C,UnusedAndBin={_Unused,_BinBits},NamedBitList) ->
+ Constr = get_constraint(C,'SizeConstraint'),
+ UnusedAndBin1 = {Unused1,Bin1} =
+ remove_trailing_bin(NamedBitList,UnusedAndBin,lower_bound(Constr)),
+ case Constr of
+ 0 ->
+ [];
+ V when integer(V),V=<16 ->
+ {Unused2,Bin2} = pad_list(V,UnusedAndBin1),
+ <<BitVal:V,_:Unused2>> = Bin2,
+ {bits,V,BitVal};
+ V when integer(V) ->
+ [align, pad_list(V, UnusedAndBin1)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ [encode_length({Lb,Ub},size(Bin1)*8 - Unused1),
+ align,UnusedAndBin1];
+ no ->
+ [encode_length(undefined,size(Bin1)*8 - Unused1),
+ align,UnusedAndBin1];
+ Sc ->
+ [encode_length(Sc,size(Bin1)*8 - Unused1),
+ align,UnusedAndBin1]
+ end.
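+%% For illustration, a worked call with a hypothetical unconstrained type
+%% (C = [], no named bits): the list of bits becomes an {Unused,Binary}
+%% pair and is then length-prefixed and aligned.
+%%
+%%   encode_bit_string([],[1,0,1],[]) ->
+%%       [{octets,[3]},align,{5,<<2#10100000>>}]
+%%   (3 = number of bits, 5 = unused low bits in the last octet)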
+
+remove_trailing_bin([], {Unused,Bin},_) ->
+ {Unused,Bin};
+remove_trailing_bin(NamedNumberList, {_Unused,Bin},C) ->
+ Size = size(Bin)-1,
+ <<Bfront:Size/binary, LastByte:8>> = Bin,
+ %% clear the Unused bits to be sure
+% LastByte1 = LastByte band (((1 bsl Unused) -1) bxor 255),
+ Unused1 = trailingZeroesInNibble(LastByte band 15),
+ Unused2 =
+ case Unused1 of
+ 4 ->
+ 4 + trailingZeroesInNibble(LastByte bsr 4);
+ _ -> Unused1
+ end,
+ case Unused2 of
+ 8 ->
+ remove_trailing_bin(NamedNumberList,{0,Bfront},C);
+ _ ->
+ case C of
+ Int when integer(Int),Int > ((size(Bin)*8)-Unused2) ->
+ %% this padding see OTP-4353
+ pad_list(Int,{Unused2,Bin});
+ _ -> {Unused2,Bin}
+ end
+ end.
+
+
+trailingZeroesInNibble(0) ->
+ 4;
+trailingZeroesInNibble(1) ->
+ 0;
+trailingZeroesInNibble(2) ->
+ 1;
+trailingZeroesInNibble(3) ->
+ 0;
+trailingZeroesInNibble(4) ->
+ 2;
+trailingZeroesInNibble(5) ->
+ 0;
+trailingZeroesInNibble(6) ->
+ 1;
+trailingZeroesInNibble(7) ->
+ 0;
+trailingZeroesInNibble(8) ->
+ 3;
+trailingZeroesInNibble(9) ->
+ 0;
+trailingZeroesInNibble(10) ->
+ 1;
+trailingZeroesInNibble(11) ->
+ 0;
+trailingZeroesInNibble(12) -> %#1100
+ 2;
+trailingZeroesInNibble(13) ->
+ 0;
+trailingZeroesInNibble(14) ->
+ 1;
+trailingZeroesInNibble(15) ->
+ 0.
+
+lower_bound({{Lb,_},_}) when integer(Lb) ->
+ Lb;
+lower_bound({Lb,_}) when integer(Lb) ->
+ Lb;
+lower_bound(C) ->
+ C.
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a tuple {Unused,Bits}. Unused is the number of unused
+%% bits, least significant bits in the last byte of Bits. Bits is
+%% the BIT STRING represented as a binary.
+%%
+decode_compact_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {{8,0},Buffer};
+ V when integer(V),V=<16 -> %fixed length 16 bits or less
+ compact_bit_string(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 -> %fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ compact_bit_string(Bytes2,V,NamedNumberList);
+ V when integer(V) -> % V > 65536 => fragmented value
+ {Bin,Buffer2} = decode_fragmented_bits(Buffer,V),
+ case Buffer2 of
+ {0,_} -> {{0,Bin},Buffer2};
+ {U,_} -> {{8-U,Bin},Buffer2}
+ end;
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ no ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ Sc ->
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ 0 -> % fixed length
+ {[],Buffer}; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_or_named(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 ->
+ Bytes2 = align(Buffer),
+ bit_list_or_named(Bytes2,V,NamedNumberList);
+ V when integer(V) ->
+ Bytes2 = align(Buffer),
+ {BinBits,_} = decode_fragmented_bits(Bytes2,V),
+ bit_list_or_named(BinBits,V,NamedNumberList);
+ Sc -> % extension marker
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%% if no named bits are declared we will return a
+%% {Unused,Bits}. Unused = integer(),
+%% Bits = binary().
+compact_bit_string(Buffer,Len,[]) ->
+ getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
+compact_bit_string(Buffer,Len,NamedNumberList) ->
+ bit_list_or_named(Buffer,Len,NamedNumberList).
+
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_or_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_or_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_or_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_or_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_or_named1(Pos+1,Bt,Names,Acc);
+bit_list_or_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_or_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_or_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_or_named1(_,[],_,Acc) ->
+ lists:reverse(Acc).
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(Int) when integer(Int), Int > 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)];
+int_to_bitlist(0) ->
+ [].
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1])->
+%% returns list with all in SetPos set.
+%% positions in the list are zero-based: the first element is 0, the second 1, etc.
+%%
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _) ->
+ [].
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns the bit string padded with trailing zero bits up to length N;
+%% if BitList contains more than N significant bits, an asn1 range_error
+%% exit is generated
+
+pad_list(N,In={Unused,Bin}) ->
+ pad_list(N, size(Bin)*8 - Unused, In).
+
+pad_list(N,Size,In={_,_}) when N < Size ->
+ exit({error,{asn1,{range_error,{bit_string,In}}}});
+pad_list(N,Size,{Unused,Bin}) when N > Size, Unused > 0 ->
+ pad_list(N,Size+1,{Unused-1,Bin});
+pad_list(N,Size,{_Unused,Bin}) when N > Size ->
+ pad_list(N,Size+1,{7,<<Bin/binary,0>>});
+pad_list(N,N,In={_,_}) ->
+ In.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(C,Bool,{_Name,Val}) ->
+ encode_octet_string(C,Bool,Val);
+encode_octet_string(_,true,_) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(C,false,Val) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ 1 ->
+ [V] = Val,
+ {bits,8,V};
+ 2 ->
+ [V1,V2] = Val,
+ [{bits,8,V1},{bits,8,V2}];
+ Sv when Sv =<65535, Sv == length(Val) -> % fixed length
+ {octets,Val};
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),{octets,Val}];
+ Sv when list(Sv) ->
+ [encode_length({hd(Sv),lists:max(Sv)},length(Val)),{octets,Val}];
+ no ->
+ [encode_length(undefined,length(Val)),{octets,Val}]
+ end.
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,C,false) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ {[],Bytes};
+ 1 ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+ 2 ->
+ {Bs,Bytes2}= getbits(Bytes,16),
+ {binary_to_list(<<Bs:16>>),Bytes2};
+ {_,0} ->
+ {[],Bytes};
+ Sv when integer(Sv), Sv =<65535 -> % fixed length
+ getoctets_as_list(Bytes,Sv);
+ Sv when integer(Sv) -> % fragmented encoding
+ Bytes2 = align(Bytes),
+ decode_fragmented_octets(Bytes2,Sv);
+ {Lb,Ub} ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ getoctets_as_list(Bytes2,Len);
+ Sv when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ getoctets_as_list(Bytes2,Len);
+ no ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_list(Bytes2,Len)
+ end.
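+%% For illustration, worked calls of encode_octet_string/3 (hypothetical
+%% constraints): a fixed size of 1 or 2 is bit-packed, while an
+%% unconstrained value is length-prefixed.
+%%
+%%   encode_octet_string([{'SizeConstraint',2}],false,"AB")
+%%       -> [{bits,8,$A},{bits,8,$B}]
+%%   encode_octet_string([],false,"AB")
+%%       -> [{octets,[2]},{octets,"AB"}]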
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+
+encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,Val);
+
+encode_restricted_string(aligned,Val) when list(Val)->
+ [encode_length(undefined,length(Val)),{octets,Val}].
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
+ encode_known_multiplier_string(aligned,StringType,C,false,Val);
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,Val) ->
+ Result = chars_encode(C,StringType,Val),
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} ->
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ 0 ->
+ [];
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ [align,Result];
+ {Ub,Lb} ->
+ [encode_length({Ub,Lb},length(Val)),align,Result];
+ Vl when list(Vl) ->
+ [encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
+ no ->
+ [encode_length(undefined,length(Val)),align,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_list(Bytes2,Len).
+
+decode_known_multiplier_string(Bytes,aligned,StringType,C,_Ext) ->
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,C,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,C,Ub);
+ 0 ->
+ {[],Bytes};
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len)
+ end.
+
+
+encode_NumericString(C,Val) ->
+ encode_known_multiplier_string(aligned,'NumericString',C,false,Val).
+decode_NumericString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'NumericString',C,false).
+
+encode_PrintableString(C,Val) ->
+ encode_known_multiplier_string(aligned,'PrintableString',C,false,Val).
+decode_PrintableString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'PrintableString',C,false).
+
+encode_VisibleString(C,Val) -> % equivalent with ISO646String
+ encode_known_multiplier_string(aligned,'VisibleString',C,false,Val).
+decode_VisibleString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'VisibleString',C,false).
+
+encode_IA5String(C,Val) ->
+ encode_known_multiplier_string(aligned,'IA5String',C,false,Val).
+decode_IA5String(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'IA5String',C,false).
+
+encode_BMPString(C,Val) ->
+ encode_known_multiplier_string(aligned,'BMPString',C,false,Val).
+decode_BMPString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'BMPString',C,false).
+
+encode_UniversalString(C,Val) ->
+ encode_known_multiplier_string(aligned,'UniversalString',C,false,Val).
+decode_UniversalString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'UniversalString',C,false).
+
+%% end of known-multiplier strings for which PER visible constraints are
+%% applied
+
+encode_GeneralString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GeneralString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_GraphicString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GraphicString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_ObjectDescriptor(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_ObjectDescriptor(Bytes) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_TeletexString(_C,Val) -> % equivalent with T61String
+ encode_restricted_string(aligned,Val).
+decode_TeletexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_VideotexString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_VideotexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{0,0,O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the per rules taking the constraint PermittedAlphabet
+%% into account.
+%% This function only encodes the value part and NOT the length
+
+chars_encode(C,StringType,Value) ->
+ case {StringType,get_constraint(C,'PermittedAlphabet')} of
+ {'UniversalString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+ _ ->
+ {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+ chars_encode2(Value,NumBits,CharOutTab)
+ end.
+
+chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,exit_if_false(H,element(H-Min+1,Tab))}|chars_encode2(T,NumBits,{Min,Max,Tab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,exit_if_false({A,B,C,D},element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab))}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_,{_,_,_}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+exit_if_false(V,false)->
+ exit({error,{asn1,{"illegal value according to Permitted alphabet constraint",V}}});
+exit_if_false(_,V) ->V.
+
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B =< 4 -> 4;
+ B when B =< 8 -> 8;
+ B when B =< 16 -> 16;
+ B when B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
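+%% For illustration (hypothetical character counts): in the aligned variant
+%% the per-character bit count is rounded up to a power of two, so e.g. a
+%% NumericString alphabet of 11 characters uses 4 bits per character and an
+%% IA5String alphabet of 128 characters uses a full octet.
+%%
+%%   charbits(11)  -> 4,   charbits(11,aligned)  -> 4
+%%   charbits(128) -> 7,   charbits(128,aligned) -> 8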
+
+
+chars_decode(Bytes,_,'BMPString',C,Len) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ no ->
+ getBMPChars(Bytes,Len);
+ _ ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet constraint"}}})
+ end;
+chars_decode(Bytes,NumBits,StringType,C,Len) ->
+ CharInTab = get_CharInTab(C,StringType),
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result =
+ if
+ Char < 256 -> Char;
+ true ->
+ list_to_tuple(binary_to_list(<<Char:32>>))
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+% chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+% {Char,Bytes2} = getbits(Bytes,NumBits),
+% Result = case minimum_octets(Char+Min) of
+% [NewChar] -> NewChar;
+% [C1,C2] -> {0,0,C1,C2};
+% [C1,C2,C3] -> {0,C1,C2,C3};
+% [C1,C2,C3,C4] -> {C1,C2,C3,C4}
+% end,
+% chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet is currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null(_) -> []; % encodes to nothing
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val).
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 is 0 or 1, otherwise integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
+encode_object_identifier({Name,Val}) when atom(Name) ->
+ encode_object_identifier(Val);
+encode_object_identifier(Val) ->
+ OctetList = e_object_identifier(Val),
+ Octets = list_to_binary(OctetList), % performs a flatten at the same time
+ [{debug,object_identifier},encode_length(undefined,size(Octets)),{octets,Octets}].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname,V}) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname,V}) when atom(Cname),list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail]) when E1 >= 0, E1 < 2, E2 < 40 ; E1==2 ->
+ Head = 40*E1 + E2, % weird
+ e_object_elements([Head|Tail],[]);
+e_object_identifier(Oid=[_,_|_Tail]) ->
+ exit({error,{asn1,{'illegal_value',Oid}}}).
+
+e_object_elements([],Acc) ->
+ lists:reverse(Acc);
+e_object_elements([H|T],Acc) ->
+ e_object_elements(T,[e_object_element(H)|Acc]).
+
+e_object_element(Num) when Num < 128 ->
+ Num;
+%% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
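+
+%% Worked example (hand-derived, not part of the original code): for the
+%% OID value {1,2,250}, e_object_identifier/1 yields [42,[129,122]]
+%% (40*1 + 2 = 42, and 250 is split into two subidentifier octets), which
+%% list_to_binary/1 in encode_object_identifier/1 flattens to <<42,129,122>>.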
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
+
+get_constraint([{Key,V}],Key) ->
+ V;
+get_constraint([],_Key) ->
+ no;
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
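+
+%% Worked examples (derived from the clauses above):
+%% get_constraint([{'SingleValue',5}],'SingleValue') -> 5
+%% get_constraint([],'ValueRange') -> no
+%% get_constraint([{'ValueRange',{0,7}},{'SingleValue',3}],'SingleValue') -> 3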
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step at encode of a complete ASN.1 type
+%%
+
+% complete(L) ->
+% case complete1(L) of
+% {[],0} ->
+% <<0>>;
+% {Acc,0} ->
+% lists:reverse(Acc);
+% {[Hacc|Tacc],Acclen} -> % Acclen >0
+% Rest = 8 - Acclen,
+% NewHacc = Hacc bsl Rest,
+% lists:reverse([NewHacc|Tacc])
+% end.
+
+
+% complete1(InList) when list(InList) ->
+% complete1(InList,[]);
+% complete1(InList) ->
+% complete1([InList],[]).
+
+% complete1([{debug,_}|T], Acc) ->
+% complete1(T,Acc);
+% complete1([H|T],Acc) when list(H) ->
+% {NewH,NewAcclen} = complete1(H,Acc),
+% complete1(T,NewH,NewAcclen);
+
+% complete1([{0,Bin}|T],Acc,0) when binary(Bin) ->
+% complete1(T,[Bin|Acc],0);
+% complete1([{Unused,Bin}|T],Acc,0) when integer(Unused),binary(Bin) ->
+% Size = size(Bin)-1,
+% <<Bs:Size/binary,B>> = Bin,
+% complete1(T,[(B bsr Unused),Bs|Acc],8-Unused);
+% complete1([{Unused,Bin}|T],[Hacc|Tacc],Acclen) when integer(Unused),binary(Bin) ->
+% Rest = 8 - Acclen,
+% Used = 8 - Unused,
+% case size(Bin) of
+% 1 ->
+% if
+% Rest >= Used ->
+% <<B:Used,_:Unused>> = Bin,
+% complete1(T,[(Hacc bsl Used) + B|Tacc],
+% (Acclen+Used) rem 8);
+% true ->
+% LeftOver = 8 - Rest - Unused,
+% <<Val2:Rest,Val1:LeftOver,_:Unused>> = Bin,
+% complete1(T,[Val1,(Hacc bsl Rest) + Val2|Tacc],
+% (Acclen+Used) rem 8)
+% end;
+% N ->
+% if
+% Rest == Used ->
+% N1 = N - 1,
+% <<B:Rest,Bs:N1/binary,_:Unused>> = Bin,
+% complete1(T,[Bs,(Hacc bsl Rest) + B|Tacc],0);
+% Rest > Used ->
+% N1 = N - 2,
+% N2 = (8 - Rest) + Used,
+% <<B1:Rest,Bytes:N1/binary,B2:N2,_:Unused>> = Bin,
+% complete1(T,[B2,Bytes,(Hacc bsl Rest) + B1|Tacc],
+% (Acclen + Used) rem 8);
+% true -> % Rest < Used
+% N1 = N - 1,
+% N2 = Used - Rest,
+% <<B1:Rest,Bytes:N1/binary,B2:N2,_:Unused>> = Bin,
+% complete1(T,[B2,Bytes,(Hacc bsl Rest) + B1|Tacc],
+% (Acclen + Used) rem 8)
+% end
+% end;
+
+% %complete1([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+% % complete1([{octets,<<Val:N/unit:8>>}|T],Acc,Acclen);
+% complete1([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+% Newval = case N of
+% 1 ->
+% Val4 = Val band 16#FF,
+% [Val4];
+% 2 ->
+% Val3 = (Val bsr 8) band 16#FF,
+% Val4 = Val band 16#FF,
+% [Val3,Val4];
+% 3 ->
+% Val2 = (Val bsr 16) band 16#FF,
+% Val3 = (Val bsr 8) band 16#FF,
+% Val4 = Val band 16#FF,
+% [Val2,Val3,Val4];
+% 4 ->
+% Val1 = (Val bsr 24) band 16#FF,
+% Val2 = (Val bsr 16) band 16#FF,
+% Val3 = (Val bsr 8) band 16#FF,
+% Val4 = Val band 16#FF,
+% [Val1,Val2,Val3,Val4]
+% end,
+% complete1([{octets,Newval}|T],Acc,Acclen);
+
+% complete1([{octets,Bin}|T],Acc,Acclen) when binary(Bin) ->
+% Rest = 8 - Acclen,
+% if
+% Rest == 8 ->
+% complete1(T,[Bin|Acc],0);
+% true ->
+% [Hacc|Tacc]=Acc,
+% complete1(T,[Bin, Hacc bsl Rest|Tacc],0)
+% end;
+
+% complete1([{octets,Oct}|T],Acc,Acclen) when list(Oct) ->
+% Rest = 8 - Acclen,
+% if
+% Rest == 8 ->
+% complete1(T,[list_to_binary(Oct)|Acc],0);
+% true ->
+% [Hacc|Tacc]=Acc,
+% complete1(T,[list_to_binary(Oct), Hacc bsl Rest|Tacc],0)
+% end;
+
+% complete1([{bit,Val}|T], Acc, Acclen) ->
+% complete1([{bits,1,Val}|T],Acc,Acclen);
+% complete1([{octet,Val}|T], Acc, Acclen) ->
+% complete1([{octets,1,Val}|T],Acc,Acclen);
+
+% complete1([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
+% complete1(T,[Val|Acc],N);
+% complete1([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
+% Rest = 8 - Acclen,
+% if
+% Rest >= N ->
+% complete1(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
+% true ->
+% Diff = N - Rest,
+% NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
+% Mask = element(Diff,{1,3,7,15,31,63,127,255}),
+% complete1(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
+% end;
+% complete1([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
+% complete1([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
+
+% complete1([align|T],Acc,0) ->
+% complete1(T,Acc,0);
+% complete1([align|T],[Hacc|Tacc],Acclen) ->
+% Rest = 8 - Acclen,
+% complete1(T,[Hacc bsl Rest|Tacc],0);
+% complete1([{octets,N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
+% complete1([{octets,Val}|T],Acc,Acclen);
+
+% complete1([],Acc,Acclen) ->
+% {Acc,Acclen}.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step at encode of a complete ASN.1 type
+%%
+
+complete(L) ->
+ case complete1(L) of
+ {[],[]} ->
+ <<0>>;
+ {Acc,[]} ->
+ Acc;
+ {Acc,Bacc} ->
+ [Acc|complete_bytes(Bacc)]
+ end.
+
+%% This function builds the "ugly" form of lists, [E1|E2], to avoid having
+%% to reverse the result at the end. This is done because it is efficient
+%% and because the result will always be sent on a port or converted by
+%% means of list_to_binary/1.
+complete1(InList) when list(InList) ->
+ complete1(InList,[],[]);
+complete1(InList) ->
+ complete1([InList],[],[]).
+
+complete1([],Acc,Bacc) ->
+ {Acc,Bacc};
+complete1([H|T],Acc,Bacc) when list(H) ->
+ {NewH,NewBacc} = complete1(H,Acc,Bacc),
+ complete1(T,NewH,NewBacc);
+
+complete1([{octets,Bin}|T],Acc,[]) ->
+ complete1(T,[Acc|Bin],[]);
+
+complete1([{octets,Bin}|T],Acc,Bacc) ->
+ complete1(T,[Acc|[complete_bytes(Bacc),Bin]],[]);
+
+complete1([{debug,_}|T], Acc,Bacc) ->
+ complete1(T,Acc,Bacc);
+
+complete1([{bits,N,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,N));
+
+complete1([{bit,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,1));
+
+complete1([align|T],Acc,[]) ->
+ complete1(T,Acc,[]);
+complete1([align|T],Acc,Bacc) ->
+ complete1(T,[Acc|complete_bytes(Bacc)],[]);
+complete1([{0,Bin}|T],Acc,[]) when binary(Bin) ->
+ complete1(T,[Acc|Bin],[]);
+complete1([{Unused,Bin}|T],Acc,[]) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8-Unused,
+ complete1(T,[Acc|Bs],[[B bsr Unused]|NumBits]);
+complete1([{Unused,Bin}|T],Acc,Bacc) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8 - Unused,
+ Bf = complete_bytes(Bacc),
+ complete1(T,[Acc|[Bf,Bs]],[[B bsr Unused]|NumBits]).
+
+
+complete_update_byte([],Val,Len) ->
+ complete_update_byte([[0]|0],Val,Len);
+complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len == 8 ->
+ [[0,((Byte bsl Len) + Val) band 255|Bacc]|0];
+complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len > 8 ->
+ Rem = 8 - NumBits,
+ Rest = Len - Rem,
+ complete_update_byte([[0,((Byte bsl Rem) + (Val bsr Rest)) band 255 |Bacc]|0],Val,Rest);
+complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) ->
+ [[((Byte bsl Len) + Val) band 255|Bacc]|NumBits+Len].
+
+
+complete_bytes([[_Byte|Bacc]|0]) ->
+ lists:reverse(Bacc);
+complete_bytes([[Byte|Bacc]|NumBytes]) ->
+ lists:reverse([(Byte bsl (8-NumBytes)) band 255|Bacc]);
+complete_bytes([]) ->
+ [].
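+
+%% Worked example (hand-derived from the clauses above): the call
+%% complete([{bits,3,5},{bits,5,1}]) packs the two bit fields into one
+%% byte and returns an iolist that flattens to <<2#10100001>> (161),
+%% i.e. (5 bsl 5) + 1.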
+
+% complete_bytes(L) ->
+% complete_bytes1(lists:reverse(L),[],[],0,0).
+
+% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) when ((NumBits+B) rem 8) == 0 ->
+% NewReplyAcc = [complete_bytes2([H|Acc],0)|ReplyAcc],
+% complete_bytes1(T,[],NewReplyAcc,0,0);
+% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) when NumFields == 7; (NumBits+B) div 8 > 0 ->
+% Rem = (NumBits+B) rem 8,
+% NewReplyAcc = [complete_bytes2([{V bsr Rem,B - Rem}|Acc],0)|ReplyAcc],
+% complete_bytes1([{V,Rem}|T],[],NewReplyAcc,0,0);
+% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) ->
+% complete_bytes1(T,[H|Acc],ReplyAcc,NumBits+B,NumFields+1);
+% complete_bytes1([],[],ReplyAcc,_,_) ->
+% lists:reverse(ReplyAcc);
+% complete_bytes1([],Acc,ReplyAcc,NumBits,_) ->
+% PadBits = case NumBits rem 8 of
+% 0 -> 0;
+% Rem -> 8 - Rem
+% end,
+% lists:reverse([complete_bytes2(Acc,PadBits)|ReplyAcc]).
+
+
+% complete_bytes2([{V1,B1}],PadBits) ->
+% <<V1:B1,0:PadBits>>;
+% complete_bytes2([{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,0:PadBits>>;
+% complete_bytes2([{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,0:PadBits>>;
+% complete_bytes2([{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,0:PadBits>>;
+% complete_bytes2([{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,0:PadBits>>;
+% complete_bytes2([{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,0:PadBits>>;
+% complete_bytes2([{V7,B7},{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,V7:B7,0:PadBits>>;
+% complete_bytes2([{V8,B8},{V7,B7},{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,V7:B7,V8:B8,0:PadBits>>.
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
new file mode 100644
index 0000000000..0647650ea6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
@@ -0,0 +1,2102 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per_bin_rt2ct.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per_bin_rt2ct).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/3, fixextensions/2,
+ getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/2, getoptionals2/2,
+ set_choice/3, encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_small_number/1,
+ decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1,
+ decode_compact_bit_string/3]).
+-export([decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([%encode_UniversalString/2, decode_UniversalString/2,
+ %encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ %encode_VisibleString/2, decode_VisibleString/2,
+ %encode_BMPString/2, decode_BMPString/2,
+ %encode_IA5String/2, decode_IA5String/2,
+ %encode_NumericString/2, decode_NumericString/2,
+ encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
+ ]).
+
+-export([decode_constrained_number/2,
+ decode_constrained_number/3,
+ decode_unconstrained_number/1,
+ decode_semi_constrained_number/2,
+ encode_unconstrained_number/1,
+ decode_constrained_number/4,
+ encode_octet_string/3,
+ decode_octet_string/3,
+ encode_known_multiplier_string/5,
+ decode_known_multiplier_string/5,
+ getoctets/2, getbits/2
+% start_drv/1,start_drv2/1,init_drv/1
+ ]).
+
+
+-export([eint_positive/1]).
+-export([pre_complete_bits/2]).
+
+-define('16K',16384).
+-define('32K',32768).
+-define('64K',65536).
+
+%%-define(nodriver,true).
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%% converts a list to a record if necessary
+list_to_record(_,Tuple) when tuple(Tuple) ->
+ Tuple;
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]).
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+% [{debug,choiceext},{bits,1,0}];
+ [0];
+setchoiceext(false) ->
+% [{debug,choiceext},{bits,1,1}].
+ [1].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(false) ->
+% [{debug,ext},{bits,1,0}];
+ [0];
+setext(true) ->
+% [{debug,ext},{bits,1,1}];
+ [1].
+
+fixoptionals(OptList,_OptLength,Val) when tuple(Val) ->
+% Bits = fixoptionals(OptList,Val,0),
+% {Val,{bits,OptLength,Bits}};
+% {Val,[10,OptLength,Bits]};
+ {Val,fixoptionals(OptList,Val,[])};
+
+fixoptionals([],_,Acc) ->
+ %% Optbits
+ lists:reverse(Acc);
+fixoptionals([Pos|Ot],Val,Acc) ->
+ case element(Pos,Val) of
+% asn1_NOVALUE -> fixoptionals(Ot,Val,Acc bsl 1);
+% asn1_DEFAULT -> fixoptionals(Ot,Val,Acc bsl 1);
+% _ -> fixoptionals(Ot,Val,(Acc bsl 1) + 1)
+ asn1_NOVALUE -> fixoptionals(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,[0|Acc]);
+ _ -> fixoptionals(Ot,Val,[1|Acc])
+ end.
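+
+%% Illustrative example (the record value is made up for this note):
+%% fixoptionals([2,3],2,{'Seq',asn1_NOVALUE,other}) returns
+%% {{'Seq',asn1_NOVALUE,other},[0,1]}, i.e. one bit per optional field,
+%% 0 for an absent/default value and 1 for a present one.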
+
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when binary(Bytes) ->
+ getbit({0,Bytes});
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+% [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+% [encode_small_length(ExtNum),[10,ExtNum,ExtBits]]
+ [encode_small_length(ExtNum),pre_complete_bits(ExtNum,ExtBits)]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_constrained_number(Bytes,{0,NumChoices-1}).
+
+%% old version kept for backward compatibility with code generated by R7B01
+getoptionals(Bytes,NumOpt) ->
+ {Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
+ {list_to_tuple(Blist),Bytes1}.
+
+%% new version used in code generated by r8b_patch/3 and later
+getoptionals2(Bytes,NumOpt) ->
+ {_,_} = getbits(Bytes,NumOpt).
+
+
+%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
+%% Num = integer(),
+%% Bytes = list() | tuple(),
+%% Unused = integer(),
+%% BinBits = binary(),
+%% RestBytes = tuple()
+getbits_as_binary(Num,Bytes) when binary(Bytes) ->
+ getbits_as_binary(Num,{0,Bytes});
+getbits_as_binary(0,Buffer) ->
+ {{0,<<>>},Buffer};
+getbits_as_binary(Num,{0,Bin}) when Num > 16 ->
+ Used = Num rem 8,
+ Pad = (8 - Used) rem 8,
+%% Nbytes = Num div 8,
+ <<Bits:Num,_:Pad,RestBin/binary>> = Bin,
+ {{Pad,<<Bits:Num,0:Pad>>},RestBin};
+getbits_as_binary(Num,Buffer={_Used,_Bin}) -> % Unaligned buffer
+ %% Num =< 16,
+ {Bits2,Buffer2} = getbits(Buffer,Num),
+ Pad = (8 - (Num rem 8)) rem 8,
+ {{Pad,<<Bits2:Num,0:Pad>>},Buffer2}.
+
+
+% integer_from_list(Int,[],BigInt) ->
+% BigInt;
+% integer_from_list(Int,[H|T],BigInt) when Int < 8 ->
+% (BigInt bsl Int) bor (H bsr (8-Int));
+% integer_from_list(Int,[H|T],BigInt) ->
+% integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
+
+getbits_as_list(Num,Bytes) when binary(Bytes) ->
+ getbits_as_list(Num,{0,Bytes},[]);
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+%% If the count has reached zero, nothing more will be picked.
+getbits_as_list(0, B, Acc) ->
+ {lists:reverse(Acc),B};
+%% If first byte in buffer is full and at least one byte will be picked,
+%% then pick one byte.
+getbits_as_list(N,{0,Bin},Acc) when N >= 8 ->
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>> = Bin,
+ getbits_as_list(N-8,{0,Rest},[B0,B1,B2,B3,B4,B5,B6,B7|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 4, Used =< 4 ->
+ NewUsed = Used + 4,
+ Rem = 8 - NewUsed,
+ <<_:Used,B3:1,B2:1,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-4,{NewUsed rem 8,NewRest},[B0,B1,B2,B3|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 2, Used =< 6 ->
+ NewUsed = Used + 2,
+ Rem = 8 - NewUsed,
+ <<_:Used,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-2,{NewUsed rem 8,NewRest},[B0,B1|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when Used =< 7 ->
+ NewUsed = Used + 1,
+ Rem = 8 - NewUsed,
+ <<_:Used,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-1,{NewUsed rem 8,NewRest},[B0|Acc]).
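+
+%% Worked example (hand-derived from the clauses above):
+%% getbits_as_list(3,<<2#10100000>>) returns {[1,0,1],{3,<<2#10100000>>}},
+%% i.e. the three most significant bits as a list, plus the buffer with
+%% its used-bit counter advanced to 3.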
+
+
+getbit({7,<<_:7,B:1,Rest/binary>>}) ->
+ {B,{0,Rest}};
+getbit({0,Buffer = <<B:1,_:7,_/binary>>}) ->
+ {B,{1,Buffer}};
+getbit({Used,Buffer}) ->
+ Unused = (8 - Used) - 1,
+ <<_:Used,B:1,_:Unused,_/binary>> = Buffer,
+ {B,{Used+1,Buffer}};
+getbit(Buffer) when binary(Buffer) ->
+ getbit({0,Buffer}).
+
+
+getbits({0,Buffer},Num) when (Num rem 8) == 0 ->
+ <<Bits:Num,Rest/binary>> = Buffer,
+ {Bits,{0,Rest}};
+getbits({Used,Bin},Num) ->
+ NumPlusUsed = Num + Used,
+ NewUsed = NumPlusUsed rem 8,
+ Unused = (8-NewUsed) rem 8,
+ case Unused of
+ 0 ->
+ <<_:Used,Bits:Num,Rest/binary>> = Bin,
+ {Bits,{0,Rest}};
+ _ ->
+ Bytes = NumPlusUsed div 8,
+ <<_:Used,Bits:Num,_:Unused,_/binary>> = Bin,
+ <<_:Bytes/binary,Rest/binary>> = Bin,
+ {Bits,{NewUsed,Rest}}
+ end;
+getbits(Bin,Num) when binary(Bin) ->
+ getbits({0,Bin},Num).
+
+
+
+% getoctet(Bytes) when list(Bytes) ->
+% getoctet({0,Bytes});
+% getoctet(Bytes) ->
+% %% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+% getoctet1(Bytes).
+
+% getoctet1({0,[H|T]}) ->
+% {H,{0,T}};
+% getoctet1({Pos,[_,H|T]}) ->
+% {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,<<_H,T/binary>>}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as an integer with bit significance as in buffer.
+getoctets({0,Buffer},Num) ->
+ <<Val:Num/integer-unit:8,RestBin/binary>> = Buffer,
+ {Val,{0,RestBin}};
+getoctets({U,<<_Padding,Rest/binary>>},Num) when U /= 0 ->
+ getoctets({0,Rest},Num);
+getoctets(Buffer,Num) when binary(Buffer) ->
+ getoctets({0,Buffer},Num).
+% getoctets(Buffer,Num) ->
+% %% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+% getoctets(Buffer,Num,0).
+
+% getoctets(Buffer,0,Acc) ->
+% {Acc,Buffer};
+% getoctets(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+% getoctets_as_list(Buffer,Num) ->
+% getoctets_as_list(Buffer,Num,[]).
+
+% getoctets_as_list(Buffer,0,Acc) ->
+% {lists:reverse(Acc),Buffer};
+% getoctets_as_list(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as a binary
+getoctets_as_bin({0,Bin},Num)->
+ <<Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin({_U,Bin},Num) ->
+ <<_Padding,Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin(Bin,Num) when binary(Bin) ->
+ getoctets_as_bin({0,Bin},Num).
+
+%% same as above but returns octets as a List
+getoctets_as_list(Buffer,Num) ->
+ {Bin,Buffer2} = getoctets_as_bin(Buffer,Num),
+ {binary_to_list(Bin),Buffer2}.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple the first list is the Rootset and the
+%% second is the Extensions and then Altnum must also be a tuple with the
+%% lengths of the 2 lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+% [{bits,1,0}, % the value is in the root set
+% encode_constrained_number({0,Len1-1},N)];
+ [0, % the value is in the root set
+ encode_constrained_number({0,Len1-1},N)];
+ N when integer(N) ->
+% [{bits,1,0}]; % no encoding if only 0 or 1 alternative
+ [0]; % no encoding if only 0 or 1 alternative
+ false ->
+% [{bits,1,1}, % extension value
+ [1, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_constrained_number({0,Len-1},N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_Alt,[],_Tag) ->
+ false.
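+
+%% Worked examples (derived from the clauses above):
+%% set_choice_tag(b,[a,b,c]) -> 1
+%% set_choice_tag(x,[a,b,c]) -> false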
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_fragmented_XXX; decode of values encoded fragmented according
+%% to ITU-T X.691 clause 10.9.3.8. The unit (XXX) is either bits, octets,
+%% characters or number of components (in a choice,sequence or similar).
+%% Buffer is a buffer {Used, Bin}.
+%% C is the constrained length.
+%% If the buffer is not aligned, this function aligns it first.
+decode_fragmented_bits({0,Buffer},C) ->
+ decode_fragmented_bits(Buffer,C,[]);
+decode_fragmented_bits({_N,<<_B,Bs/binary>>},C) ->
+ decode_fragmented_bits(Bs,C,[]).
+
+decode_fragmented_bits(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin, Len * ?'16K'),
+ decode_fragmented_bits(Bin2,C,[Value,Acc]);
+decode_fragmented_bits(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ BinBits = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int),C == size(BinBits) ->
+ {BinBits,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ {BinBits,{0,Bin}}
+ end;
+decode_fragmented_bits(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ Result = {BinBits,{Used,_Rest}} =
+ case (Len rem 8) of
+ 0 ->
+ <<Value:Len/binary-unit:1,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Value|Acc])),{0,Bin2}};
+ Rem ->
+ Bytes = Len div 8,
+ U = 8 - Rem,
+ <<Value:Bytes/binary-unit:8,Bits1:Rem,Bits2:U,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Bits1 bsl U,Value|Acc])),
+ {Rem,<<Bits2,Bin2/binary>>}}
+ end,
+ case C of
+ Int when integer(Int),C == (size(BinBits) - ((8 - Used) rem 8)) ->
+ Result;
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ Result
+ end.
+
+
+decode_fragmented_octets({0,Bin},C) ->
+ decode_fragmented_octets(Bin,C,[]);
+decode_fragmented_octets({_N,<<_B,Bs/binary>>},C) ->
+ decode_fragmented_octets(Bs,C,[]).
+
+decode_fragmented_octets(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin,Len * ?'16K'),
+ decode_fragmented_octets(Bin2,C,[Value,Acc]);
+decode_fragmented_octets(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ Octets = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int), C == size(Octets) ->
+ {Octets,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,Octets}}});
+ _ ->
+ {Octets,{0,Bin}}
+ end;
+decode_fragmented_octets(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ <<Value:Len/binary-unit:8,Bin2/binary>> = Bin,
+ BinOctets = list_to_binary(lists:reverse([Value|Acc])),
+ case C of
+ Int when integer(Int),size(BinOctets) == Int ->
+ {BinOctets,Bin2};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinOctets}}});
+ _ ->
+ {BinOctets,Bin2}
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_Constraint, Val) when list(Val) ->
+ Bin = list_to_binary(Val),
+ case size(Bin) of
+ Size when Size>255 ->
+ [encode_length(undefined,Size),[21,<<Size:16>>,Bin]];
+ Size ->
+ [encode_length(undefined,Size),[20,Size,Bin]]
+ end;
+% [encode_length(undefined,size(Bin)),{octets,Bin}]; % octets implies align
+encode_open_type(_Constraint, Val) when binary(Val) ->
+% [encode_length(undefined,size(Val)),{octets,Val}]. % octets implies align
+ case size(Val) of
+ Size when Size>255 ->
+ [encode_length(undefined,size(Val)),[21,<<Size:16>>,Val]]; % octets implies align
+ Size ->
+ [encode_length(undefined,Size),[20,Size,Val]]
+ end.
+%% the binary_to_list is not optimal but compatible with the current solution
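+
+%% Worked example (hand-derived; the 10/20/21 integers are the intermediate
+%% codes used throughout this module, apparently standing in for the older
+%% {bits,...}/{octets,...} forms): encode_open_type([],[1,2,3]) returns
+%% [[20,1,3],[20,3,<<1,2,3>>]], i.e. a one-octet length of 3 followed by
+%% the three value octets.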
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _Constraint) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_bin(Bytes2,Len).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_NamedNumberList) when integer(V) ->
+ encode_integer(C,V);
+encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
+ encode_integer(C,V,NamedNumberList).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer([{Rc,_Ec}],Val) when tuple(Rc) -> % XXX when is this invoked? First argument most often a list,...Ok this is the extension case...but it doesn't work.
+ case (catch encode_integer([Rc],Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+% [{bits,1,1},encode_unconstrained_number(Val)];
+ [1,encode_unconstrained_number(Val)];
+ Encoded ->
+% [{bits,1,0},Encoded]
+ [0,Encoded]
+ end;
+
+encode_integer([],Val) ->
+ encode_unconstrained_number(Val);
+%% The constraint is the effective constraint, and in this case is a number
+encode_integer([{'SingleValue',V}],V) ->
+ [];
+encode_integer([{'ValueRange',VR={Lb,Ub},Range,PreEnc}],Val) when Val >= Lb,
+ Ub >= Val ->
+ %% this case when NamedNumberList
+ encode_constrained_number(VR,Range,PreEnc,Val);
+encode_integer([{'ValueRange',{Lb,'MAX'}}],Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_integer([{'ValueRange',{'MIN',_}}],Val) ->
+ encode_unconstrained_number(Val);
+encode_integer([{'ValueRange',VR={_Lb,_Ub}}],Val) ->
+ encode_constrained_number(VR,Val);
+encode_integer(_,Val) ->
+ exit({error,{asn1,{illegal_value,Val}}}).
+
+
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,[{Rc,_Ec}]) when tuple(Rc) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,[Rc]);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_Lb,_Ub} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+%% X.691:10.6 Encoding of a normally small non-negative whole number
+%% Use this for encoding of CHOICE index if there is an extension marker in
+%% the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+% [{bits,1,0},{bits,6,Val}];
+% [{bits,7,Val}]; % same as above but more efficient
+ [10,7,Val]; % same as above but more efficient
+encode_small_number(Val) ->
+% [{bits,1,1},encode_semi_constrained_number(0,Val)].
+ [1,encode_semi_constrained_number(0,Val)].
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,0)
+ end.
+
+%% X.691:10.7 Encoding of a semi-constrained whole number
+%% a dedicated clause encode_semi_constrained_number(0,Val) might be an optimization
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Oct = eint_positive(Val2),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ %{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ [20,Len+1,[Len|Oct]];
+ Len < 256 ->
+ [encode_length(undefined,Len),[20,Len,Oct]];
+ true ->
+ [encode_length(undefined,Len),[21,<<Len:16>>,Oct]]
+ end.
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number({Lb,_Ub},_Range,{bits,N},Val) ->
+ Val2 = Val-Lb,
+% {bits,N,Val2};
+ [10,N,Val2];
+encode_constrained_number({Lb,_Ub},_Range,{octets,N},Val) when N < 256->
+ %% N is 8 or 16 (1 or 2 octets)
+ Val2 = Val-Lb,
+% {octets,<<Val2:N/unit:8>>};
+ [20,N,Val2];
+encode_constrained_number({Lb,_Ub},_Range,{octets,N},Val) -> % N>255
+ %% N is 8 or 16 (1 or 2 octets)
+ Val2 = Val-Lb,
+% {octets,<<Val2:N/unit:8>>};
+ [21,<<N:16>>,Val2];
+encode_constrained_number({Lb,_Ub},Range,_,Val) ->
+ Val2 = Val-Lb,
+ if
+ Range =< 16#1000000 -> % max 3 octets
+ Octs = eint_positive(Val2),
+% [encode_length({1,3},size(Octs)),{octets,Octs}];
+ L = length(Octs),
+ [encode_length({1,3},L),[20,L,Octs]];
+ Range =< 16#100000000 -> % max 4 octets
+ Octs = eint_positive(Val2),
+% [encode_length({1,4},size(Octs)),{octets,Octs}];
+ L = length(Octs),
+ [encode_length({1,4},L),[20,L,Octs]];
+ Range =< 16#10000000000 -> % max 5 octets
+ Octs = eint_positive(Val2),
+% [encode_length({1,5},size(Octs)),{octets,Octs}];
+ L = length(Octs),
+ [encode_length({1,5},L),[20,L,Octs]];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+% Size = {bits,1,Val2};
+ [Val2];
+ Range =< 4 ->
+% Size = {bits,2,Val2};
+ [10,2,Val2];
+ Range =< 8 ->
+ [10,3,Val2];
+ Range =< 16 ->
+ [10,4,Val2];
+ Range =< 32 ->
+ [10,5,Val2];
+ Range =< 64 ->
+ [10,6,Val2];
+ Range =< 128 ->
+ [10,7,Val2];
+ Range =< 255 ->
+ [10,8,Val2];
+ Range =< 256 ->
+% Size = {octets,[Val2]};
+ [20,1,Val2];
+ Range =< 65536 ->
+% Size = {octets,<<Val2:16>>};
+ [20,2,<<Val2:16>>];
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+% [{bits,2,length(Octs)-1},{octets,Octs}];
+ Len = length(Octs),
+ [10,2,Len-1,20,Len,Octs];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ Len = length(Octs),
+ [10,2,Len-1,20,Len,Octs];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ Len = length(Octs),
+ [10,3,Len-1,20,Len,Octs];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end;
+encode_constrained_number({_,_},Val) ->
+ exit({error,{asn1,{illegal_value,Val}}}).
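+
+%% Worked examples (derived from the clauses above):
+%% encode_constrained_number({0,7},5) -> [10,3,5] (a 3-bit field)
+%% encode_constrained_number({1,10},4) -> [10,4,3] (range 10, value 4-1=3)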
+
+decode_constrained_number(Buffer,VR={Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+ decode_constrained_number(Buffer,VR,Range).
+
+decode_constrained_number(Buffer,{Lb,_Ub},_Range,{bits,N}) ->
+ {Val,Remain} = getbits(Buffer,N),
+ {Val+Lb,Remain};
+decode_constrained_number(Buffer,{Lb,_Ub},_Range,{octets,N}) ->
+ {Val,Remain} = getoctets(Buffer,N),
+ {Val+Lb,Remain}.
+
+decode_constrained_number(Buffer,{Lb,_Ub},Range) ->
+ % Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+%% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ %{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ [20,Len+1,[Len|Oct]];
+ Len < 256 ->
+% [encode_length(undefined,Len),20,Len,Oct];
+ [20,Len+2,<<2:2,Len:14>>,Oct];% equiv with encode_length(undefined,Len) but faster
+ true ->
+% [encode_length(undefined,Len),{octets,Oct}]
+ [encode_length(undefined,Len),[21,<<Len:16>>,Oct]]
+ end;
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+% {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ [20,Len+1,[Len|Oct]];% equiv with encode_length(undefined,Len) but faster
+ Len < 256 ->
+% [encode_length(undefined,Len),20,Len,Oct];
+ [20,Len+2,<<2:2,Len:14>>,Oct];% equiv with encode_length(undefined,Len) but faster
+ true ->
+ %[encode_length(undefined,Len),{octets,Oct}]
+ [encode_length(undefined,Len),[21,<<Len:16>>,Oct]]
+ end.
+
+
+%% used for positive Values which don't need a sign bit
+%% returns a list
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
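+
+%% Worked examples (derived from the clauses above):
+%% eint_positive(5) -> [5]
+%% eint_positive(256) -> [1,0]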
+
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+% minimum_octets(Val) ->
+% minimum_octets(Val,[]).
+
+% minimum_octets(Val,Acc) when Val > 0 ->
+% minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+% minimum_octets(0,Acc) ->
+% Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+% {octets,[Len]};
+ [20,1,Len];
+ Len < 16384 ->
+ %{octets,<<2:2,Len:14>>};
+ [20,2,<<2:2,Len:14>>];
+ true -> % should be able to encode length >= 16384, i.e. fragmented length
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length(Vr={Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number(Vr,Len);
+encode_length({Lb,_Ub},Len) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ encode_length(undefined,Len);
+encode_length({Vr={Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0,Len=<Ub ->
+ %% constrained extensible
+% [{bits,1,0},encode_constrained_number(Vr,Len)];
+ [0,encode_constrained_number(Vr,Len)];
+encode_length({{Lb,_},[]},Len) ->
+ [1,encode_semi_constrained_number(Lb,Len)];
+encode_length(SingleValue,_Len) when integer(SingleValue) ->
+ [].
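+
+%% Worked examples (derived from the clauses above):
+%% encode_length(undefined,5) -> [20,1,5] (single length octet)
+%% encode_length(undefined,300) -> [20,2,<<2:2,300:14>>] (two length octets)
+%% encode_length({0,7},5) -> [10,3,5] (constrained, encoded as a bit field)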
+
+%% X.691 10.9.3.4 (only used for the length of the bitmap that prefixes
+%% extension additions in a sequence or set)
+encode_small_length(Len) when Len =< 64 ->
+%% [{bits,1,0},{bits,6,Len-1}];
+% {bits,7,Len-1}; % the same as above but more efficient
+ [10,7,Len-1];
+encode_small_length(Len) ->
+% [{bits,1,1},encode_length(undefined,Len)].
+ [1,encode_length(undefined,Len)].
+
+% decode_small_length({Used,<<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>>}) ->
+% case Buffer of
+% <<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>> ->
+% {Num,
+% case getbit(Buffer) of
+% {0,Remain} ->
+% {Bits,Remain2} = getbits(Remain,6),
+% {Bits+1,Remain2};
+% {1,Remain} ->
+% decode_length(Remain,undefined)
+% end.
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ {0,Buffer2} = align(Buffer),
+ case Buffer2 of
+ <<0:1,Oct:7,Rest/binary>> ->
+ {Oct,{0,Rest}};
+ <<2:2,Val:14,Rest/binary>> ->
+ {Val,{0,Rest}};
+ <<3:2,_Val:14,_Rest/binary>> ->
+ %% this case should be fixed
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}})
+ end;
+%% {Bits,_} = getbits(Buffer2,2),
+% case Bits of
+% 2 ->
+% {Val,Bytes3} = getoctets(Buffer2,2),
+% {(Val band 16#3FFF),Bytes3};
+% 3 ->
+% exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+% _ ->
+% {Val,Bytes3} = getoctet(Buffer2),
+% {Val band 16#7F,Bytes3}
+% end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+decode_length(_Buffer,{Lb,_Ub}) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ exit({error,{asn1,{decode_length,{nyi,above_64K}}}});
+decode_length(Buffer,{{Lb,Ub},[]}) ->
+ case getbit(Buffer) of
+ {0,Buffer2} ->
+ decode_length(Buffer2, {Lb,Ub})
+ end;
+
+
+%When does this case occur with {_,_Lb,Ub} ??
+% X.691:10.9.3.5
+decode_length({Used,Bin},{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub. NOTE! this case does not cover the case when Ub > 65535
+ Unused = (8-Used) rem 8,
+ case Bin of
+ <<_:Used,0:1,Val:7,R:Unused,Rest/binary>> ->
+ {Val,{Used,<<R,Rest/binary>>}};
+ <<_:Used,_:Unused,2:2,Val:14,Rest/binary>> ->
+ {Val, {0,Rest}};
+ <<_:Used,_:Unused,3:2,_:14,_Rest/binary>> ->
+ exit({error,{asn1,{decode_length,{nyi,length_above_64K}}}})
+ end;
+% decode_length(Buffer,{_,_Lb,Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+% case getbit(Buffer) of
+% {0,Remain} ->
+% getbits(Remain,7);
+% {1,Remain} ->
+% {Val,Remain2} = getoctets(Buffer,2),
+% {Val band 2#0111111111111111, Remain2}
+% end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
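+
+%% Worked example (hand-derived from the clauses above):
+%% decode_length(<<5,255>>,undefined) -> {5,{0,<<255>>}}, i.e. a length of
+%% 5 read from one octet, with the rest of the aligned buffer returned.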
+
+
+ % X.691:11
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+ 1 -> % this an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be one of:
+%% - [identifiers] where only the named identifiers are set to one;
+%%   the Constraint must then carry some information about the bit length.
+%% - [list of ones and zeroes] giving all bits explicitly
+%% - an integer value representing the bit list
+%% C is the constraint Len, only valid when identifiers are used
+
+
+%% when the value is a list of {Unused,BinBits}, where
+%% Unused = integer(),
+%% BinBits = binary().
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
+ binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList);
+
+%% when the value is a list of named bits
+
+encode_bit_string(C, LoNB=[FirstVal | _RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes(LoNB, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);% consider the constraint
+
+encode_bit_string(C, BL=[{bit,_} | _RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes(BL, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+encode_bit_string(Int, BitListValue, _)
+ when list(BitListValue),integer(Int) ->
+ %% The type is constrained by a single value size constraint
+ [40,Int,length(BitListValue),BitListValue];
+% encode_bit_string(C, BitListValue,NamedBitList)
+% when list(BitListValue) ->
+% [encode_bit_str_length(C,BitListValue),
+% 2,45,BitListValue];
+encode_bit_string(no, BitListValue,[])
+ when list(BitListValue) ->
+ [encode_length(undefined,length(BitListValue)),
+ 2,BitListValue];
+encode_bit_string(C, BitListValue,[])
+ when list(BitListValue) ->
+ [encode_length(C,length(BitListValue)),
+ 2,BitListValue];
+encode_bit_string(no, BitListValue,_NamedBitList)
+ when list(BitListValue) ->
+ %% this case with an unconstrained BIT STRING can be made more efficient
+ %% if the complete driver can take a special code so the length field
+ %% is encoded there.
+ NewBitLVal = lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitListValue))),
+ [encode_length(undefined,length(NewBitLVal)),
+ 2,NewBitLVal];
+encode_bit_string(C,BitListValue,_NamedBitList)
+ when list(BitListValue) ->% C = {_,'MAX'}
+% NewBitLVal = lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+% lists:reverse(BitListValue))),
+ NewBitLVal = bit_string_trailing_zeros(BitListValue,C),
+ [encode_length(C,length(NewBitLVal)),
+ 2,NewBitLVal];
+
+% encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+% BitListToBinary =
+% %% fun that transforms a list of 1 and 0 to a tuple:
+% %% {UnusedBitsInLastByte, Binary}
+% fun([H|T],Acc,N,Fun) ->
+% Fun(T,(Acc bsl 1)+H,N+1,Fun);
+% ([],Acc,N,_) -> % length fits in one byte
+% Unused = (8 - (N rem 8)) rem 8,
+% % case N/8 of
+% % _Len =< 255 ->
+% % [30,Unused,(Unused+N)/8,<<Acc:N,0:Unused>>];
+% % _Len ->
+% % Len = (Unused+N)/8,
+% % [31,Unused,<<Len:16>>,<<Acc:N,0:Unused>>]
+% % end
+% {Unused,<<Acc:N,0:Unused>>}
+% end,
+% UnusedAndBin =
+% case NamedBitList of
+% [] -> % dont remove trailing zeroes
+% BitListToBinary(BitListValue,0,0,BitListToBinary);
+% _ ->
+% BitListToBinary(lists:reverse(
+% lists:dropwhile(fun(0)->true;(1)->false end,
+% lists:reverse(BitListValue))),
+% 0,0,BitListToBinary)
+% end,
+% encode_bin_bit_string(C,UnusedAndBin,NamedBitList);
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a tuple
+encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
+ encode_bit_string(C,Val,NamedBitList).
+
+bit_string_trailing_zeros(BitList,C) when integer(C) ->
+ bit_string_trailing_zeros1(BitList,C,C);
+bit_string_trailing_zeros(BitList,{Lb,Ub}) when integer(Lb) ->
+ bit_string_trailing_zeros1(BitList,Lb,Ub);
+bit_string_trailing_zeros(BitList,{{Lb,Ub},_}) when integer(Lb) ->
+ bit_string_trailing_zeros1(BitList,Lb,Ub);
+bit_string_trailing_zeros(BitList,_) ->
+ BitList.
+
+bit_string_trailing_zeros1(BitList,Lb,Ub) ->
+ case length(BitList) of
+ Lb -> BitList;
+ B when B<Lb -> BitList++lists:duplicate(Lb-B,0);
+ D -> F = fun(L,LB,LB,_,_)->lists:reverse(L);
+ ([0|R],L1,LB,UB,Fun)->Fun(R,L1-1,LB,UB,Fun);
+ (L,L1,_,UB,_)when L1 =< UB -> lists:reverse(L);
+ (_,_L1,_,_,_) ->exit({error,{list_length_BIT_STRING,
+ BitList}}) end,
+ F(lists:reverse(BitList),D,Lb,Ub,F)
+ end.
+
+%% encode_bin_bit_string/3, when value is a tuple of Unused and BinBits.
+%% Unused = integer(), i.e. the number of unused bits in the least
+%% significant byte of BinBits = binary().
+encode_bin_bit_string(C,{_,BinBits},_NamedBitList)
+ when integer(C),C=<16 ->
+ [45,C,size(BinBits),BinBits];
+encode_bin_bit_string(C,{_Unused,BinBits},_NamedBitList)
+ when integer(C) ->
+ [2,45,C,size(BinBits),BinBits];
+encode_bin_bit_string(C,UnusedAndBin={_,_},NamedBitList) ->
+% UnusedAndBin1 = {Unused1,Bin1} =
+ {Unused1,Bin1} =
+ %% removes all trailing bits if NamedBitList is not empty
+ remove_trailing_bin(NamedBitList,UnusedAndBin),
+ case C of
+% case get_constraint(C,'SizeConstraint') of
+
+% 0 ->
+%             []; % should be decided at compile time
+% V when integer(V),V=<16 ->
+% {Unused2,Bin2} = pad_list(V,UnusedAndBin1),
+% <<BitVal:V,_:Unused2>> = Bin2,
+% % {bits,V,BitVal};
+% [10,V,BitVal];
+% V when integer(V) ->
+% %[align, pad_list(V, UnusedAndBin1)];
+% {Unused2,Bin2} = pad_list(V, UnusedAndBin1),
+% <<BitVal:V,_:Unused2>> = Bin2,
+% [2,octets_unused_to_complete(Unused2,size(Bin2),Bin2)];
+
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+% [encode_length({Lb,Ub},size(Bin1)*8 - Unused1),
+% align,UnusedAndBin1];
+ Size=size(Bin1),
+ [encode_length({Lb,Ub},Size*8 - Unused1),
+ 2,octets_unused_to_complete(Unused1,Size,Bin1)];
+ no ->
+ Size=size(Bin1),
+ [encode_length(undefined,Size*8 - Unused1),
+ 2,octets_unused_to_complete(Unused1,Size,Bin1)];
+ Sc ->
+ Size=size(Bin1),
+ [encode_length(Sc,Size*8 - Unused1),
+ 2,octets_unused_to_complete(Unused1,Size,Bin1)]
+ end.
+
+remove_trailing_bin([], {Unused,Bin}) ->
+ {Unused,Bin};
+remove_trailing_bin(NamedNumberList, {_Unused,Bin}) ->
+ Size = size(Bin)-1,
+ <<Bfront:Size/binary, LastByte:8>> = Bin,
+ %% clear the Unused bits to be sure
+% LastByte1 = LastByte band (((1 bsl Unused) -1) bxor 255),% why this???
+ Unused1 = trailingZeroesInNibble(LastByte band 15),
+ Unused2 =
+ case Unused1 of
+ 4 ->
+ 4 + trailingZeroesInNibble(LastByte bsr 4);
+ _ -> Unused1
+ end,
+ case Unused2 of
+ 8 ->
+ remove_trailing_bin(NamedNumberList,{0,Bfront});
+ _ ->
+ {Unused2,Bin}
+ end.
+
+
+trailingZeroesInNibble(0) ->
+ 4;
+trailingZeroesInNibble(1) ->
+ 0;
+trailingZeroesInNibble(2) ->
+ 1;
+trailingZeroesInNibble(3) ->
+ 0;
+trailingZeroesInNibble(4) ->
+ 2;
+trailingZeroesInNibble(5) ->
+ 0;
+trailingZeroesInNibble(6) ->
+ 1;
+trailingZeroesInNibble(7) ->
+ 0;
+trailingZeroesInNibble(8) ->
+ 3;
+trailingZeroesInNibble(9) ->
+ 0;
+trailingZeroesInNibble(10) ->
+ 1;
+trailingZeroesInNibble(11) ->
+ 0;
+trailingZeroesInNibble(12) -> %#1100
+ 2;
+trailingZeroesInNibble(13) ->
+ 0;
+trailingZeroesInNibble(14) ->
+ 1;
+trailingZeroesInNibble(15) ->
+ 0.
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a tuple {Unused,Bits}. Unused is the number of unused
+%% bits, least significant bits in the last byte of Bits. Bits is
+%% the BIT STRING represented as a binary.
+%%
+decode_compact_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {{8,0},Buffer};
+ V when integer(V),V=<16 -> %fixed length 16 bits or less
+ compact_bit_string(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 -> %fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ compact_bit_string(Bytes2,V,NamedNumberList);
+ V when integer(V) -> % V > 65536 => fragmented value
+ {Bin,Buffer2} = decode_fragmented_bits(Buffer,V),
+ case Buffer2 of
+ {0,_} -> {{0,Bin},Buffer2};
+ {U,_} -> {{8-U,Bin},Buffer2}
+ end;
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ no ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ Sc ->
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ 0 -> % fixed length
+ {[],Buffer}; % nothing to decode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_or_named(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 ->
+ Bytes2 = align(Buffer),
+ bit_list_or_named(Bytes2,V,NamedNumberList);
+ V when integer(V) ->
+ Bytes2 = align(Buffer),
+ {BinBits,_Bytes3} = decode_fragmented_bits(Bytes2,V),
+ bit_list_or_named(BinBits,V,NamedNumberList);
+ Sc -> % extension marker
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%% if no named bits are declared we will return a
+%% {Unused,Bits}. Unused = integer(),
+%% Bits = binary().
+compact_bit_string(Buffer,Len,[]) ->
+ getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
+compact_bit_string(Buffer,Len,NamedNumberList) ->
+ bit_list_or_named(Buffer,Len,NamedNumberList).
+
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_or_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_or_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_or_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_or_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_or_named1(Pos+1,Bt,Names,Acc);
+bit_list_or_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_or_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_or_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_or_named1(_Pos,[],_Names,Acc) ->
+ lists:reverse(Acc).
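+
+%% Editor's note: an illustrative example (not part of the original module);
+%% the NamedNumberList [{a,0},{b,2}] is arbitrary. Given the clauses above,
+%%   bit_list_or_named1(0, [1,0,1,1], [{a,0},{b,2}], [])
+%% evaluates to [a,b,{bit,3}]: named set bits become atoms, unnamed set bits
+%% become {bit,Pos}, and cleared bits are dropped.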
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(Int) when integer(Int), Int > 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)];
+int_to_bitlist(0) ->
+ [].
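+
+%% Editor's note: an illustrative example (not part of the original module).
+%% The bit list is least-significant-bit first, e.g.
+%%   int_to_bitlist(6) evaluates to [0,1,1]   % 6 = 2#110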
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
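+
+%% Editor's note: an illustrative example (not part of the original module);
+%% the NamedBitList [{a,0},{b,2}] is arbitrary. Given the clauses above,
+%%   get_all_bitposes([b,{bit,5}], [{a,0},{b,2}], [])
+%% evaluates to the sorted position list [2,5].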
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1], StartPos) ->
+%% returns a list with a 1 at every position in SetPos and 0 elsewhere.
+%% Positions are zero-based: the first element is position 0, the second 1, etc.
+%%
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _) ->
+ [].
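+
+%% Editor's note: an illustrative example (not part of the original module).
+%% Starting from position 0,
+%%   make_and_set_list([1,3], 0) evaluates to [0,1,0,1].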
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns a list of length N, padded with trailing {bit,0} elements.
+%% If BitList contains more than N significant bits, an asn1 range_error
+%% exit is generated.
+
+% pad_list(N,In={Unused,Bin}) ->
+% pad_list(N, size(Bin)*8 - Unused, In).
+
+% pad_list(N,Size,In={Unused,Bin}) when N < Size ->
+% exit({error,{asn1,{range_error,{bit_string,In}}}});
+% pad_list(N,Size,{Unused,Bin}) when N > Size, Unused > 0 ->
+% pad_list(N,Size+1,{Unused-1,Bin});
+% pad_list(N,Size,{Unused,Bin}) when N > Size ->
+% pad_list(N,Size+1,{7,<<Bin/binary,0>>});
+% pad_list(N,N,In={Unused,Bin}) ->
+% In.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(C,Bool,{_Name,Val}) ->
+ encode_octet_string(C,Bool,Val);
+encode_octet_string(_C,true,_Val) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(SZ={_,_},false,Val) ->
+% [encode_length(SZ,length(Val)),align,
+% {octets,Val}];
+ Len = length(Val),
+ [encode_length(SZ,Len),2,
+ octets_to_complete(Len,Val)];
+encode_octet_string(SZ,false,Val) when list(SZ) ->
+ Len = length(Val),
+ [encode_length({hd(SZ),lists:max(SZ)},Len),2,
+ octets_to_complete(Len,Val)];
+encode_octet_string(no,false,Val) ->
+ Len = length(Val),
+ [encode_length(undefined,Len),2,
+ octets_to_complete(Len,Val)];
+encode_octet_string(C,_,_) ->
+ exit({error,{not_implemented,C}}).
+
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,1,false) ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+decode_octet_string(Bytes,2,false) ->
+ {Bs,Bytes2}= getbits(Bytes,16),
+ {binary_to_list(<<Bs:16>>),Bytes2};
+decode_octet_string(Bytes,Sv,false) when integer(Sv),Sv=<65535 ->
+ Bytes2 = align(Bytes),
+ getoctets_as_list(Bytes2,Sv);
+decode_octet_string(Bytes,Sv,false) when integer(Sv) ->
+ Bytes2 = align(Bytes),
+ decode_fragmented_octets(Bytes2,Sv);
+decode_octet_string(Bytes,{Lb,Ub},false) ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+decode_octet_string(Bytes,Sv,false) when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+decode_octet_string(Bytes,no,false) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+
+encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,Val);
+
+encode_restricted_string(aligned,Val) when list(Val)->
+ Len = length(Val),
+% [encode_length(undefined,length(Val)),{octets,Val}].
+ [encode_length(undefined,Len),octets_to_complete(Len,Val)].
+
+
+encode_known_multiplier_string(StringType,SizeC,NumBits,CharOutTab,{Name,Val}) when atom(Name) ->
+ encode_known_multiplier_string(StringType,SizeC,NumBits,CharOutTab,Val);
+encode_known_multiplier_string(StringType,SizeC,NumBits,CharOutTab,Val) ->
+ Result = chars_encode2(Val,NumBits,CharOutTab),
+ case SizeC of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} -> %% this case cannot happen !!??
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+%% [align,Result];
+ [2,Result];
+ {Ub,Lb} ->
+% [encode_length({Ub,Lb},length(Val)),align,Result];
+ [encode_length({Ub,Lb},length(Val)),2,Result];
+ no ->
+% [encode_length(undefined,length(Val)),align,Result]
+ [encode_length(undefined,length(Val)),2,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_list(Bytes2,Len).
+
+decode_known_multiplier_string(StringType,SizeC,NumBits,CharInTab,Bytes) ->
+ case SizeC of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,CharInTab,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,CharInTab,Ub);
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,CharInTab,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,CharInTab,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,CharInTab,Len)
+ end.
+
+encode_GeneralString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GeneralString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_GraphicString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GraphicString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_ObjectDescriptor(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_ObjectDescriptor(Bytes) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_TeletexString(_C,Val) -> % equivalent to T61String
+ encode_restricted_string(aligned,Val).
+decode_TeletexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_VideotexString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_VideotexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{0,0,O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the PermittedAlphabet
+%% constraint into account.
+%% This function only encodes the value part, NOT the length.
+
+% chars_encode(C,StringType,Value) ->
+% case {StringType,get_constraint(C,'PermittedAlphabet')} of
+% {'UniversalString',{_,Sv}} ->
+% exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+% {'BMPString',{_,Sv}} ->
+% exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+% _ ->
+% {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+% chars_encode2(Value,NumBits,CharOutTab)
+% end.
+
+
+chars_encode2([H|T],NumBits,T1={Min,Max,notab}) when H =< Max, H >= Min ->
+% [[10,NumBits,H-Min]|chars_encode2(T,NumBits,T1)];
+ [pre_complete_bits(NumBits,H-Min)|chars_encode2(T,NumBits,T1)];
+chars_encode2([H|T],NumBits,T1={Min,Max,Tab}) when H =< Max, H >= Min ->
+% [[10,NumBits,element(H-Min+1,Tab)]|chars_encode2(T,NumBits,T1)];
+ [pre_complete_bits(NumBits,exit_if_false(H,element(H-Min+1,Tab)))|
+ chars_encode2(T,NumBits,T1)];
+chars_encode2([{A,B,C,D}|T],NumBits,T1={Min,_Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+% [[10,NumBits,((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min]|chars_encode2(T,NumBits,T1)];
+ [pre_complete_bits(NumBits,
+ ((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min)|
+ chars_encode2(T,NumBits,T1)];
+chars_encode2([H={A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+ [pre_complete_bits(NumBits,exit_if_false(H,element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab)))|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_NumBits,{_Min,_Max,_Tab}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+exit_if_false(V,false)->
+ exit({error,{asn1,{"illegal value according to Permitted alphabet constraint",V}}});
+exit_if_false(_,V) ->V.
+
+pre_complete_bits(NumBits,Val) when NumBits =< 8 ->
+ [10,NumBits,Val];
+pre_complete_bits(NumBits,Val) when NumBits =< 16 ->
+ [10,NumBits-8,Val bsr 8,10,8,(Val band 255)];
+pre_complete_bits(NumBits,Val) when NumBits =< 2040 -> % 255 * 8
+% LBUsed = NumBits rem 8,
+% {Unused,Len} = case (8 - LBUsed) of
+% 8 -> {0,NumBits div 8};
+% U -> {U,(NumBits div 8) + 1}
+% end,
+% NewVal = Val bsr LBUsed,
+% [30,Unused,Len,<<NewVal:Len/unit:8,Val:LBUsed,0:Unused>>].
+ Unused = (8 - (NumBits rem 8)) rem 8,
+ Len = NumBits + Unused,
+ [30,Unused,Len div 8,<<(Val bsl Unused):Len>>].
+
+% get_NumBits(C,StringType) ->
+% case get_constraint(C,'PermittedAlphabet') of
+% {'SingleValue',Sv} ->
+% charbits(length(Sv),aligned);
+% no ->
+% case StringType of
+% 'IA5String' ->
+% charbits(128,aligned); % 16#00..16#7F
+% 'VisibleString' ->
+% charbits(95,aligned); % 16#20..16#7E
+% 'PrintableString' ->
+% charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+% 'NumericString' ->
+% charbits(11,aligned); % $ ,"0123456789"
+% 'UniversalString' ->
+% 32;
+% 'BMPString' ->
+% 16
+% end
+% end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+% get_CharOutTab(C,StringType) ->
+% get_CharTab(C,StringType,out).
+
+% get_CharInTab(C,StringType) ->
+% get_CharTab(C,StringType,in).
+
+% get_CharTab(C,StringType,InOut) ->
+% case get_constraint(C,'PermittedAlphabet') of
+% {'SingleValue',Sv} ->
+% get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+% no ->
+% case StringType of
+% 'IA5String' ->
+% {0,16#7F,notab};
+% 'VisibleString' ->
+% get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+% 'PrintableString' ->
+% Chars = lists:sort(
+% " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+% get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+% 'NumericString' ->
+% get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+% 'UniversalString' ->
+% {0,16#FFFFFFFF,notab};
+% 'BMPString' ->
+% {0,16#FFFF,notab}
+% end
+% end.
+
+% get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+% BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+% if
+% Max =< BitValMax ->
+% {0,Max,notab};
+% true ->
+% case InOut of
+% out ->
+% {Min,Max,create_char_tab(Min,Chars)};
+% in ->
+% {Min,Max,list_to_tuple(Chars)}
+% end
+% end.
+
+% create_char_tab(Min,L) ->
+% list_to_tuple(create_char_tab(Min,L,0)).
+% create_char_tab(Min,[Min|T],V) ->
+% [V|create_char_tab(Min+1,T,V+1)];
+% create_char_tab(_Min,[],_V) ->
+% [];
+% create_char_tab(Min,L,V) ->
+% [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+% charbits(NumOfChars,aligned) ->
+% case charbits(NumOfChars) of
+% 1 -> 1;
+% 2 -> 2;
+% B when B =< 4 -> 4;
+% B when B =< 8 -> 8;
+% B when B =< 16 -> 16;
+% B when B =< 32 -> 32
+% end.
+
+% charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+% charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+% charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+% charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+% charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+% charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+% charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+% charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+% charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+% charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+% charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+% charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+% charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+% charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+% charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+% charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+% charbits(NumOfChars) when integer(NumOfChars) ->
+% 16 + charbits1(NumOfChars bsr 16).
+
+% charbits1(0) ->
+% 0;
+% charbits1(NumOfChars) ->
+% 1 + charbits1(NumOfChars bsr 1).
+
+
+chars_decode(Bytes,_,'BMPString',_,Len) ->
+ getBMPChars(Bytes,Len);
+chars_decode(Bytes,NumBits,_StringType,CharInTab,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result =
+ if
+ Char < 256 -> Char;
+ true ->
+ list_to_tuple(binary_to_list(<<Char:32>>))
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet are currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null(_Val) -> []; % encodes to nothing
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val).
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
+encode_object_identifier({Name,Val}) when atom(Name) ->
+ encode_object_identifier(Val);
+encode_object_identifier(Val) ->
+ OctetList = e_object_identifier(Val),
+ Octets = list_to_binary(OctetList), % performs a flatten at the same time
+% [{debug,object_identifier},encode_length(undefined,size(Octets)),{octets,Octets}].
+ [encode_length(undefined,size(Octets)),
+ octets_to_complete(size(Octets),Octets)].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname,V}) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname,V}) when atom(Cname),list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail]) when E1 >= 0, E1 < 2, E2 < 40 ; E1==2 ->
+ Head = 40*E1 + E2, % weird
+ e_object_elements([Head|Tail],[]);
+e_object_identifier(Oid=[_,_|_Tail]) ->
+ exit({error,{asn1,{'illegal_value',Oid}}}).
+
+e_object_elements([],Acc) ->
+ lists:reverse(Acc);
+e_object_elements([H|T],Acc) ->
+ e_object_elements(T,[e_object_element(H)|Acc]).
+
+e_object_element(Num) when Num < 128 ->
+ Num;
+%% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
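+
+%% Editor's note: an illustrative example (not part of the original module).
+%% A subidentifier above 127 is split into a continuation octet and a final
+%% octet, e.g.
+%%   e_object_element(840) evaluates to [134,72]   % 840 = 6*128 + 72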
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
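+
+%% Editor's note: an illustrative example (not part of the original module),
+%% the inverse of the encoding sketched above:
+%%   dec_subidentifiers([134,72], 0, []) evaluates to [840].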
+
+get_constraint([{Key,V}],Key) ->
+ V;
+get_constraint([],_) ->
+ no;
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
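+
+%% Editor's note: illustrative examples (not part of the original module);
+%% the constraint list shown is arbitrary:
+%%   get_constraint([{'SizeConstraint',{1,4}}], 'SizeConstraint') -> {1,4}
+%%   get_constraint([], 'SizeConstraint')                         -> no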
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step at encode of a complete ASN.1 type
+%%
+
+-ifdef(nodriver).
+
+complete(L) ->
+ case complete1(L) of
+ {[],[]} ->
+ <<0>>;
+ {Acc,[]} ->
+ Acc;
+ {Acc,Bacc} ->
+ [Acc|complete_bytes(Bacc)]
+ end.
+
+
+% this function builds the ugly form of lists [E1|E2] to avoid having to reverse it at the end.
+% this is done because it is efficient and because the result will always be sent on a port or
+% converted by means of list_to_binary/1
+ complete1(InList) when list(InList) ->
+ complete1(InList,[],[]);
+ complete1(InList) ->
+ complete1([InList],[],[]).
+
+ complete1([],Acc,Bacc) ->
+ {Acc,Bacc};
+ complete1([H|T],Acc,Bacc) when list(H) ->
+ {NewH,NewBacc} = complete1(H,Acc,Bacc),
+ complete1(T,NewH,NewBacc);
+
+ complete1([{octets,Bin}|T],Acc,[]) ->
+ complete1(T,[Acc|Bin],[]);
+
+ complete1([{octets,Bin}|T],Acc,Bacc) ->
+ complete1(T,[Acc|[complete_bytes(Bacc),Bin]],[]);
+
+ complete1([{debug,_}|T], Acc,Bacc) ->
+ complete1(T,Acc,Bacc);
+
+ complete1([{bits,N,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,N));
+
+ complete1([{bit,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,1));
+
+ complete1([align|T],Acc,[]) ->
+ complete1(T,Acc,[]);
+ complete1([align|T],Acc,Bacc) ->
+ complete1(T,[Acc|complete_bytes(Bacc)],[]);
+ complete1([{0,Bin}|T],Acc,[]) when binary(Bin) ->
+ complete1(T,[Acc|Bin],[]);
+ complete1([{Unused,Bin}|T],Acc,[]) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8-Unused,
+ complete1(T,[Acc|Bs],[[B bsr Unused]|NumBits]);
+ complete1([{Unused,Bin}|T],Acc,Bacc) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8 - Unused,
+ Bf = complete_bytes(Bacc),
+ complete1(T,[Acc|[Bf,Bs]],[[B bsr Unused]|NumBits]).
+
+
+ complete_update_byte([],Val,Len) ->
+ complete_update_byte([[0]|0],Val,Len);
+ complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len == 8 ->
+ [[0,((Byte bsl Len) + Val) band 255|Bacc]|0];
+ complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len > 8 ->
+ Rem = 8 - NumBits,
+ Rest = Len - Rem,
+ complete_update_byte([[0,((Byte bsl Rem) + (Val bsr Rest)) band 255 |Bacc]|0],Val,Rest);
+ complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) ->
+ [[((Byte bsl Len) + Val) band 255|Bacc]|NumBits+Len].
+
+
+ complete_bytes([[Byte|Bacc]|0]) ->
+ lists:reverse(Bacc);
+ complete_bytes([[Byte|Bacc]|NumBytes]) ->
+ lists:reverse([(Byte bsl (8-NumBytes)) band 255|Bacc]);
+ complete_bytes([]) ->
+ [].
+
+-else.
+
+
+ complete(L) ->
+ case catch port_control(drv_complete,1,L) of
+ Bin when binary(Bin) ->
+ Bin;
+ List when list(List) -> handle_error(List,L);
+ {'EXIT',{badarg,Reason}} ->
+ asn1rt_driver_handler:load_driver(),
+ receive
+ driver_ready ->
+ case catch port_control(drv_complete,1,L) of
+ Bin2 when binary(Bin2) -> Bin2;
+ List when list(List) -> handle_error(List,L);
+ Error -> exit(Error)
+ end;
+ {error,Error} -> % error when loading driver
+ %% the driver could not be loaded
+ exit(Error);
+ Error={port_error,Reason} ->
+ exit(Error)
+ end;
+ {'EXIT',Reason} ->
+ exit(Reason)
+ end.
+
+handle_error([],_)->
+ exit({error,{"memory allocation problem"}});
+handle_error("1",L) -> % error in complete in driver
+ exit({error,{asn1_error,L}});
+handle_error(ErrL,L) ->
+ exit({error,{unknown_error,ErrL,L}}).
+
+-endif.
+
+
+octets_to_complete(Len,Val) when Len < 256 ->
+ [20,Len,Val];
+octets_to_complete(Len,Val) ->
+ [21,<<Len:16>>,Val].
+
+octets_unused_to_complete(Unused,Len,Val) when Len < 256 ->
+ [30,Unused,Len,Val];
+octets_unused_to_complete(Unused,Len,Val) ->
+ [31,Unused,<<Len:16>>,Val].
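+
+%% Editor's note: illustrative examples (not part of the original module),
+%% showing the two length forms produced by the functions above:
+%%   octets_to_complete(3, [1,2,3])   -> [20,3,[1,2,3]]
+%%   octets_to_complete(300, Octets)  -> [21,<<300:16>>,Octets]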
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_v1.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_v1.erl
new file mode 100644
index 0000000000..ebab269f5d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/asn1rt_per_v1.erl
@@ -0,0 +1,1843 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per_v1.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per_v1).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/2, fixextensions/2,
+ setoptionals/1, fixoptionals2/3, getext/1, getextension/2,
+ skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/2, getoptionals/3, set_choice/3,
+ getoptionals2/2,
+ encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_small_number/1,
+ encode_boolean/1, decode_boolean/1, encode_length/2,
+ decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1,
+ decode_compact_bit_string/3]).
+-export([encode_enumerated/3, decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([encode_UniversalString/2, decode_UniversalString/2,
+ encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ encode_VisibleString/2, decode_VisibleString/2,
+ encode_BMPString/2, decode_BMPString/2,
+ encode_IA5String/2, decode_IA5String/2,
+ encode_NumericString/2, decode_NumericString/2,
+ encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
+ ]).
+
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+ [{debug,choiceext},{bit,0}];
+setchoiceext(false) ->
+ [{debug,choiceext},{bit,1}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(true) ->
+ [{debug,ext},{bit,1}];
+setext(false) ->
+ [{debug,ext},{bit,0}].
+
+%%
+
+fixoptionals2(OptList,OptLength,Val) when tuple(Val) ->
+ Bits = fixoptionals2(OptList,Val,0),
+ {Val,{bits,OptLength,Bits}};
+
+fixoptionals2([],_Val,Acc) ->
+ %% Optbits
+ Acc;
+fixoptionals2([Pos|Ot],Val,Acc) ->
+ case element(Pos,Val) of
+ asn1_NOVALUE -> fixoptionals2(Ot,Val,Acc bsl 1);
+ asn1_DEFAULT -> fixoptionals2(Ot,Val,Acc bsl 1);
+ _ -> fixoptionals2(Ot,Val,(Acc bsl 1) + 1)
+ end.
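+
+%% Editor's note: an illustrative example (not part of the original module);
+%% the record value {rec,asn1_NOVALUE,someval} is arbitrary. With the
+%% optional fields at positions 2 and 3,
+%%   fixoptionals2([2,3], 2, {rec,asn1_NOVALUE,someval})
+%% evaluates to {{rec,asn1_NOVALUE,someval},{bits,2,1}}, i.e. the bitmap 2#01
+%% with the first (absent) optional in the most significant bit.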
+
+
+%%
+%% fixoptionals remains only for backward compatibility purpose
+fixoptionals(OptList,Val) when tuple(Val) ->
+ fixoptionals(OptList,Val,[]);
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([],Val,Acc) ->
+ % return {Val,Opt}
+ {Val,lists:reverse(Acc)};
+fixoptionals([{_,Pos}|Ot],Val,Acc) ->
+ case element(Pos+1,Val) of
+ asn1_NOVALUE -> fixoptionals(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,[0|Acc]);
+ _ -> fixoptionals(Ot,Val,[1|Acc])
+ end.
+
+
+%setoptionals(OptList,Val) ->
+% Vlist = tuple_to_list(Val),
+% setoptionals(OptList,Vlist,1,[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,Acc1,Acc2) ->
+ % return {Val,Opt}
+ {list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
+
+setoptionals([H|T]) ->
+ [{bit,H}|setoptionals(T)];
+setoptionals([]) ->
+ [{debug,optionals}].
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+ [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % with only 1 alternative nothing is encoded
+ {0,Bytes};
+getchoice(Bytes,_NumChoices,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_integer(Bytes,[{'ValueRange',{0,NumChoices-1}}]).
+
+getoptionals2(Bytes,NumOpt) ->
+ getbits(Bytes,NumOpt).
+
+%% getoptionals is kept only for backwards compatibility
+getoptionals(Bytes,NumOpt) ->
+ {Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
+ {list_to_tuple(Blist),Bytes1}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getoptionals/3 is only here for compatibility from 1.3.2;
+%% the code generator uses getoptionals/2
+
+getoptionals(Bytes,L,NumComp) when list(L) ->
+ {Blist,Bytes1} = getbits_as_list(length(L),Bytes),
+ {list_to_tuple(comptuple(Blist,L,NumComp,1)),Bytes1}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% comptuple is only here for compatibility; it is not used from 1.3.2 on
+comptuple([Bh|Bt],[{_Name,Nr}|T],NumComp,Nr) ->
+ [Bh|comptuple(Bt,T,NumComp-1,Nr+1)];
+comptuple(Bl,[{Name,Tnr}|Tl],NumComp,Nr) ->
+ [0|comptuple(Bl,[{Name,Tnr}|Tl],NumComp-1,Nr+1)];
+comptuple(_B,_L,0,_Nr) ->
+ [];
+comptuple(B,O,N,Nr) ->
+ [0|comptuple(B,O,N-1,Nr+1)].
+
+%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
+%% Num = integer(),
+%% Bytes = list() | tuple(),
+%% Unused = integer(),
+%% BinBits = binary(),
+%% RestBytes = tuple()
+getbits_as_binary(Num,Bytes) when list(Bytes) ->
+ getbits_as_binary(Num,{0,Bytes});
+getbits_as_binary(_Num,{Used,[]}) ->
+ {{0,<<>>},{Used,[]}};
+getbits_as_binary(Num,{Used,Bits=[H|T]}) ->
+ B1 = case (Num+Used) =< 8 of
+ true -> Num;
+ _ -> 8-Used
+ end,
+ B2 = Num - B1,
+ Pad = (8 - ((B1+B2) rem 8)) rem 8,% Pad /= 8
+ RestBits = lists:nthtail((B1+B2) div 8,Bits),
+ Int = integer_from_list(B2,T,0),
+ NewUsed = (Used + Num) rem 8,
+ {{Pad,<<(H bsr (8-(Used+B1))):B1,Int:B2,0:Pad>>},{NewUsed,RestBits}}.
+
+integer_from_list(_Int,[],BigInt) ->
+ BigInt;
+integer_from_list(Int,[H|_T],BigInt) when Int < 8 ->
+ (BigInt bsl Int) bor (H bsr (8-Int));
+integer_from_list(Int,[H|T],BigInt) ->
+ integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
+
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+getbits_as_list(0,Bytes,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getbits_as_list(Num,Bytes,Acc) ->
+ {Bit,NewBytes} = getbit(Bytes),
+ getbits_as_list(Num-1,NewBytes,[Bit|Acc]).
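+
+%% Editor's note: an illustrative example (not part of the original module).
+%% Bits are read most-significant-bit first from the buffer:
+%%   getbits_as_list(3, {0,[2#10100000]}) -> {[1,0,1],{3,[2#10100000]}}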
+
+getbit(Bytes) ->
+% io:format("getbit:~p~n",[Bytes]),
+ getbit1(Bytes).
+
+getbit1({7,[H|T]}) ->
+ {H band 1,{0,T}};
+getbit1({Pos,[H|T]}) ->
+ {(H bsr (7-Pos)) band 1,{(Pos+1) rem 8,[H|T]}};
+getbit1(Bytes) when list(Bytes) ->
+ getbit1({0,Bytes}).
+
+%% This could be optimized
+getbits(Buffer,Num) ->
+% io:format("getbits:Buffer = ~p~nNum=~p~n",[Buffer,Num]),
+ getbits(Buffer,Num,0).
+
+getbits(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getbits(Buffer,Num,Acc) ->
+ {B,NewBuffer} = getbit(Buffer),
+ getbits(NewBuffer,Num-1,B + (Acc bsl 1)).
+
+
+getoctet(Bytes) when list(Bytes) ->
+ getoctet({0,Bytes});
+getoctet(Bytes) ->
+% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+ getoctet1(Bytes).
+
+getoctet1({0,[H|T]}) ->
+ {H,{0,T}};
+getoctet1({_Pos,[_,H|T]}) ->
+ {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,[_H|T]}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+getoctets(Buffer,Num) ->
+% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+ getoctets(Buffer,Num,0).
+
+getoctets(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getoctets(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
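+
+%% Editor's note: an illustrative example (not part of the original module).
+%% Octets are accumulated big-endian:
+%%   getoctets({0,[1,2,255]}, 2) -> {258,{0,[255]}}   % 258 = 1*256 + 2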
+
+getoctets_as_list(Buffer,Num) ->
+ getoctets_as_list(Buffer,Num,[]).
+
+getoctets_as_list(Buffer,0,Acc) ->
+ {lists:reverse(Acc),Buffer};
+getoctets_as_list(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple the first list is the Rootset and the
+%% second is the Extensions and then Altnum must also be a tuple with the
+%% lengths of the 2 lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+ [{bit,0}, % the value is in the root set
+ encode_integer([{'ValueRange',{0,Len1-1}}],N)];
+ N when integer(N) ->
+ [{bit,0}]; % no encoding if only 0 or 1 alternative
+ false ->
+ [{bit,1}, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_integer([{'ValueRange',{0,Len-1}}],N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_Alt,[],_Tag) ->
+ false.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_Constraint, Val) when list(Val) ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}];
+encode_open_type(_Constraint, Val) when binary(Val) ->
+ [encode_length(undefined,size(Val)),align,
+ {octets,binary_to_list(Val)}].
+%% the binary_to_list is not optimal but compatible with the current solution
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _Constraint) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_) when integer(V) ->
+ encode_integer(C,V);
+encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
+ encode_integer(C,V,NamedNumberList).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer({Rc,_Ec},Val) ->
+ case (catch encode_integer(Rc,Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+ [{bit,1},encode_unconstrained_number(Val)];
+ Encoded ->
+ [{bit,0},Encoded]
+ end;
+encode_integer(C,Val ) when list(C) ->
+ case get_constraint(C,'SingleValue') of
+ no ->
+ encode_integer1(C,Val);
+ V when integer(V),V == Val ->
+ []; % a type restricted to a single value encodes to nothing
+ V when list(V) ->
+ case lists:member(Val,V) of
+ true ->
+ encode_integer1(C,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end.
+
+encode_integer1(C, Val) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ encode_unconstrained_number(Val);
+ {Lb,'MAX'} ->
+ encode_semi_constrained_number(Lb,Val);
+ %% positive with range
+ {Lb,Ub} when Val >= Lb,
+ Ub >= Val ->
+ encode_constrained_number(VR,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,VR,Val}}})
+ end.
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,{Rc,_Ec}) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,Rc);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ V when list(V) ->
+ {Val,Buffer2} = decode_integer1(Buffer,C),
+ case lists:member(Val,V) of
+ true ->
+ {Val,Buffer2};
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_,_} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+% X.691:10.6 Encoding of a normally small non-negative whole number
+% Use this for encoding of CHOICE index if there is an extension marker in
+% the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+ [{bit,0},{bits,6,Val}];
+encode_small_number(Val) ->
+ [{bit,1},encode_semi_constrained_number(0,Val)].
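+
+%% Editor's note: an illustrative example (not part of the original module).
+%% Values up to 63 fit in one bit plus six bits:
+%%   encode_small_number(7) -> [{bit,0},{bits,6,7}]
+%% Larger values fall back to encode_semi_constrained_number/2.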
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,{0,'MAX'})
+ end.
+
+% X.691:10.7 Encoding of a semi-constrained whole number
+%% a possible optimization would be a special clause encode_semi_constrained_number(0,Val)
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Octs = eint_positive(Val2),
+ [encode_length(undefined,length(Octs)),{octets,Octs}].
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+ {bits,1,Val2};
+ Range =< 4 ->
+ {bits,2,Val2};
+ Range =< 8 ->
+ {bits,3,Val2};
+ Range =< 16 ->
+ {bits,4,Val2};
+ Range =< 32 ->
+ {bits,5,Val2};
+ Range =< 64 ->
+ {bits,6,Val2};
+ Range =< 128 ->
+ {bits,7,Val2};
+ Range =< 255 ->
+ {bits,8,Val2};
+ Range =< 256 ->
+ {octets,1,Val2};
+ Range =< 65536 ->
+ {octets,2,Val2};
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,3},length(Octs)),{octets,Octs}];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,4},length(Octs)),{octets,Octs}];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,5},length(Octs)),{octets,Octs}];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end.
+
+decode_constrained_number(Buffer,{Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+% Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}];
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}].
+
+%% used for positive Values which don't need a sign bit
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
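+
+%% Editor's note: illustrative examples (not part of the original module).
+%% eint/2 adds a leading zero octet for values whose top octet is >= 128,
+%% which eint_positive/1 then strips:
+%%   eint_positive(300) -> [1,44]    % 300 = 1*256 + 44
+%%   eint_positive(200) -> [200]     % leading sign octet removed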
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
+
+%% used for signed positive values
+
+%eint(Val, Ack) ->
+% X = Val band 255,
+% Next = Val bsr 8,
+% if
+% Next == 0, X >= 127 ->
+% [0,X|Ack];
+% Next == 0 ->
+% [X|Ack];
+% true ->
+% eint(Next,[X|Ack])
+% end.
+
+%%% used for signed negative values
+%enint(Val, Acc) ->
+% NumOctets = if
+% -Val < 16#80 -> 1;
+% -Val < 16#8000 ->2;
+% -Val < 16#800000 ->3;
+% -Val < 16#80000000 ->4;
+% -Val < 16#8000000000 ->5;
+% -Val < 16#800000000000 ->6;
+% -Val < 16#80000000000000 ->7;
+% -Val < 16#8000000000000000 ->8;
+% -Val < 16#800000000000000000 ->9
+% end,
+% enint(Val,Acc,NumOctets).
+
+%enint(Val, Acc,0) ->
+% Acc;
+%enint(Val, Acc,NumOctets) ->
+% enint(Val bsr 8,[Val band 255|Acc],NumOctets-1).
+
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(Val,Acc) when Val > 0 ->
+ minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+minimum_octets(0,Acc) ->
+ Acc.
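+
+%% Editor's note: an illustrative example (not part of the original module):
+%%   minimum_octets(258) -> [1,2]   % 258 = 1*256 + 2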
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+ {octet,Len band 16#7F};
+ Len < 16384 ->
+ {octets,2,2#1000000000000000 bor Len};
+ true ->
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length({Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number({Lb,Ub},Len);
+encode_length({{Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0 ->
+ %% constrained extensible
+ [{bit,0},encode_constrained_number({Lb,Ub},Len)];
+encode_length(SingleValue,_) when integer(SingleValue) ->
+ [].
+
+encode_small_length(Len) when Len =< 64 ->
+ [{bit,0},{bits,6,Len-1}];
+encode_small_length(Len) ->
+ [{bit,1},encode_length(undefined,Len)].
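+
+%% Editor's note: illustrative examples (not part of the original module).
+%% Lengths up to 64 are encoded as Len-1 in six bits; larger lengths fall
+%% back to an unconstrained length determinant:
+%%   encode_small_length(3)  -> [{bit,0},{bits,6,2}]
+%%   encode_small_length(65) -> [{bit,1},{octet,65}]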
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ Buffer2 = align(Buffer),
+ {Bits,_} = getbits(Buffer2,2),
+ case Bits of
+ 2 ->
+ {Val,Bytes3} = getoctets(Buffer2,2),
+ {(Val band 16#3FFF),Bytes3};
+ 3 ->
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+ _ ->
+ {Val,Bytes3} = getoctet(Buffer2),
+ {Val band 16#7F,Bytes3}
+ end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+
+decode_length(Buffer,{{Lb,Ub},[]}) ->
+ case getbit(Buffer) of
+ {0,Buffer2} ->
+ decode_length(Buffer2, {Lb,Ub})
+ end;
+ % X.691:10.9.3.5
+decode_length(Buffer,{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+ case getbit(Buffer) of
+ {0,Remain} ->
+ getbits(Remain,7);
+ {1,_Remain} ->
+ {Val,Remain2} = getoctets(Buffer,2),
+ {Val band 2#0111111111111111, Remain2}
+ end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
+
+
+% X.691:11
+encode_boolean({Name,Val}) when atom(Name) ->
+ encode_boolean(Val);
+encode_boolean(true) ->
+ {bit,1};
+encode_boolean(false) ->
+ {bit,0};
+encode_boolean(Val) ->
+ exit({error,{asn1,{encode_boolean,Val}}}).
+
+
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:12
+%% ENUMERATED
+%%
+%% encode_enumerated(C,Value,NamedNumberTup) -> CompleteList
+%%
+%%
+
+encode_enumerated(C,{Name,Value},NamedNumberList) when
+ atom(Name),list(NamedNumberList) ->
+ encode_enumerated(C,Value,NamedNumberList);
+
+%% ENUMERATED with extension mark
+encode_enumerated(_C,{asn1_enum,Value},{_Nlist1,Nlist2}) when Value >= length(Nlist2) ->
+ [{bit,1},encode_small_number(Value)];
+encode_enumerated(C,Value,{Nlist1,Nlist2}) ->
+ case enum_search(Value,Nlist1,0) of
+ NewV when integer(NewV) ->
+ [{bit,0},encode_integer(C,NewV)];
+ false ->
+ case enum_search(Value,Nlist2,0) of
+ ExtV when integer(ExtV) ->
+ [{bit,1},encode_small_number(ExtV)];
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end
+ end;
+
+encode_enumerated(C,Value,NamedNumberList) when list(NamedNumberList) ->
+ case enum_search(Value,NamedNumberList,0) of
+ NewV when integer(NewV) ->
+ encode_integer(C,NewV);
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end.
+
+%% returns the ordinal number (0, 1, ...) of the position in the list where
+%% Name is found, or false if it is not found
+%%
+enum_search(Name,[Name|_NamedNumberList],Acc) ->
+ Acc;
+enum_search(Name,[_H|T],Acc) ->
+ enum_search(Name,T,Acc+1);
+enum_search(_,[],_) ->
+    false. % name not found
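+
+%% Editor's note: illustrative examples (not part of the original module);
+%% the list [a,b,c] is arbitrary:
+%%   enum_search(b, [a,b,c], 0) -> 1
+%%   enum_search(x, [a,b,c], 0) -> false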
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+	1 -> % this is an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only the bits with named identifiers are set to one;
+%%   the Constraint must then have some information about the
+%%   bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bitlist
+%% C is the constraint Len, only relevant when the value is a list of identifiers
+
+
+%% when the value is a list of {Unused,BinBits}, where
+%% Unused = integer(),
+%% BinBits = binary().
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
+ binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList);
+
+%% when the value is a list of named bits
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+encode_bit_string(C, [{bit,No} | RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes([{bit,No} | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+
+encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+ Bl1 =
+ case NamedBitList of
+	    [] -> % don't remove trailing zeroes
+ BitListValue;
+ _ -> % first remove any trailing zeroes
+ lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitListValue)))
+ end,
+ BitList = [{bit,X} || X <- Bl1],
+ BListLen = length(BitList),
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ []; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ pad_list(V,BitList);
+	V when integer(V) -> % fixed length > 16 bits
+ [align,pad_list(V,BitList)];
+ {Lb,Ub} when integer(Lb),integer(Ub),BListLen<Lb ->
+ %% padding due to OTP-4353
+ [encode_length({Lb,Ub},Lb),align,pad_list(Lb,BitList)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ [encode_length({Lb,Ub},length(BitList)),align,BitList];
+ no ->
+ [encode_length(undefined,length(BitList)),align,BitList];
+ Sc={{Lb,Ub},_} when integer(Lb),integer(Ub),BListLen<Lb ->
+ %% padding due to OTP-4353
+ [encode_length(Sc,Lb),align,pad_list(Lb,BitList)];
+ Sc -> % extension marker
+ [encode_length(Sc,length(BitList)),align,BitList]
+ end;
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a tuple
+encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
+ encode_bit_string(C,Val,NamedBitList).
+
+
+%% encode_bin_bit_string/3, when value is a tuple of Unused and BinBits.
+%% Unused = integer(),
+%% BinBits = binary().
+
+encode_bin_bit_string(C,{Unused,BinBits},NamedBitList) ->
+ RemoveZerosIfNNL =
+ fun({NNL,BitList}) ->
+ case NNL of
+ [] -> BitList;
+ _ ->
+ lists:reverse(
+ lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitList)))
+ end
+ end,
+ {OctetList,OLSize,LastBits} =
+ case size(BinBits) of
+ N when N>1 ->
+ IntList = binary_to_list(BinBits),
+ [H|T] = lists:reverse(IntList),
+ Bl1 = RemoveZerosIfNNL({NamedBitList,lists:reverse(int_to_bitlist(H,8-Unused))}),% lists:sublist obsolete if trailing bits are zero !
+ {[{octet,X} || X <- lists:reverse(T)],size(BinBits)-1,
+ [{bit,X} || X <- Bl1]};
+ 1 ->
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1>> = BinBits,
+ {[],0,[{bit,X} || X <- lists:sublist([B7,B6,B5,B4,B3,B2,B1,B0],8-Unused)]};
+ _ ->
+ {[],0,[]}
+ end,
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ V when integer(V),V=<16 ->
+ [OctetList, pad_list(V,LastBits)];
+ V when integer(V) ->
+% [OctetList, align, pad_list(V rem 8,LastBits)];
+ [align,OctetList, pad_list(V rem 8,LastBits)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ NewLastBits = maybe_pad(Lb,length(LastBits)+(OLSize*8),LastBits,NamedBitList),
+ [encode_length({Lb,Ub},length(NewLastBits)+(OLSize*8)),
+% OctetList,align,LastBits];
+ align,OctetList,NewLastBits];
+ no ->
+ [encode_length(undefined,length(LastBits)+(OLSize*8)),
+% OctetList,align,LastBits];
+ align,OctetList,LastBits];
+ Sc={{Lb,_},_} when integer(Lb) ->
+ NewLastBits = maybe_pad(Lb,length(LastBits)+(OLSize*8),LastBits,NamedBitList),
+ [encode_length(Sc,length(NewLastBits)+(OLSize*8)),
+ align,OctetList,NewLastBits];
+ Sc ->
+ [encode_length(Sc,length(LastBits)+(OLSize*8)),
+% OctetList,align,LastBits]
+ align,OctetList,LastBits]
+ end.
+
+maybe_pad(_,_,Bits,[]) ->
+ Bits;
+maybe_pad(Lb,LenBits,Bits,_) when Lb>LenBits ->
+ pad_list(Lb,Bits);
+maybe_pad(_,_,Bits,_) ->
+ Bits.
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a tuple {Unused,Bits}. Unused is the number of unused
+%% bits, least significant bits in the last byte of Bits. Bits is
+%% the BIT STRING represented as a binary.
+%%
+decode_compact_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {{0,<<>>},Buffer};
+ V when integer(V),V=<16 -> %fixed length 16 bits or less
+ compact_bit_string(Buffer,V,NamedNumberList);
+ V when integer(V) -> %fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ compact_bit_string(Bytes2,V,NamedNumberList);
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ Sc ->
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+	    {[],Buffer}; % nothing to decode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_to_named(Buffer,V,NamedNumberList);
+	V when integer(V) -> % fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ bit_list_to_named(Bytes2,V,NamedNumberList);
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList);
+ Sc -> % extension marker
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%% if no named bits are declared we will return a
+%% {Unused,Bits}. Unused = integer(),
+%% Bits = binary().
+compact_bit_string(Buffer,Len,[]) ->
+ getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
+compact_bit_string(Buffer,Len,NamedNumberList) ->
+ bit_list_to_named(Buffer,Len,NamedNumberList).
+
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_to_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_to_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_to_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_to_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_to_named1(Pos+1,Bt,Names,Acc);
+bit_list_to_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_to_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_to_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_to_named1(_Pos,[],_Names,Acc) ->
+ lists:reverse(Acc).
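+
+%% For example, with a hypothetical named bit list [{flagA,0},{flagB,2}]
+%% (the names are only for illustration), the bit list [1,0,1,1] should
+%% come back as [flagA,flagB,{bit,3}]:
+%%
+%%   bit_list_to_named1(0, [1,0,1,1], [{flagA,0},{flagB,2}], []).
+%%   %% -> [flagA,flagB,{bit,3}]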
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
+
+int_to_bitlist(_Int,0) ->
+ [];
+int_to_bitlist(0,N) ->
+ [0|int_to_bitlist(0,N-1)];
+int_to_bitlist(Int,N) ->
+ [Int band 1 | int_to_bitlist(Int bsr 1, N-1)].
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([sorted list of positions to set to 1], StartPos) ->
+%% returns a list with a 1 at every position given in SetPos and 0 elsewhere.
+%% Positions are counted from StartPos: the first element of the result is
+%% position StartPos, the next StartPos+1, etc.
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _XPos) ->
+ [].
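+
+%% For example, the sorted position list [1,3] (as produced by
+%% get_all_bitposes/3) should expand to a bit list with positions 1 and 3 set:
+%%
+%%   make_and_set_list([1,3], 0).
+%%   %% -> [0,1,0,1]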
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns BitList padded with trailing {bit,0} elements up to length N;
+%% if BitList contains more than N bits an exit with an asn1 range_error
+%% is generated
+
+pad_list(0,BitList) ->
+ case BitList of
+ [] -> [];
+ _ -> exit({error,{asn1,{range_error,{bit_string,BitList}}}})
+ end;
+pad_list(N,[Bh|Bt]) ->
+ [Bh|pad_list(N-1,Bt)];
+pad_list(N,[]) ->
+ [{bit,0},pad_list(N-1,[])].
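+
+%% Note that the padding clause above builds a nested list
+%% ([{bit,0},pad_list(N-1,[])] rather than [{bit,0}|pad_list(N-1,[])]);
+%% this is harmless here because complete/1 below unwraps nested lists
+%% while producing the final byte list.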
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,{Name,Val}) when atom(Name) ->
+ encode_octet_string(C,false,Val);
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(C,Bool,{_Name,Val}) ->
+ encode_octet_string(C,Bool,Val);
+encode_octet_string(_,true,_) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(C,false,Val) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ 1 ->
+ [V] = Val,
+ {bits,8,V};
+ 2 ->
+ [V1,V2] = Val,
+ [{bits,8,V1},{bits,8,V2}];
+ Sv when Sv =<65535, Sv == length(Val) -> % fixed length
+ [align,{octets,Val}];
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),align,
+ {octets,Val}];
+ Sv when list(Sv) ->
+ [encode_length({hd(Sv),lists:max(Sv)},length(Val)),align,
+ {octets,Val}];
+ no ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}]
+ end.
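+
+%% For example, with no size constraint (an empty constraint list) a
+%% hypothetical value "abc" is encoded as an unconstrained length followed
+%% by the aligned octets, i.e. encode_octet_string([], "abc") returns
+%% [L, align, {octets,"abc"}] where L = encode_length(undefined, 3).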
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,C,false) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ {[],Bytes};
+ 1 ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+ 2 ->
+ {B1,Bytes2}= getbits(Bytes,8),
+ {B2,Bytes3}= getbits(Bytes2,8),
+ {[B1,B2],Bytes3};
+ {_,0} ->
+ {[],Bytes};
+ Sv when integer(Sv), Sv =<65535 -> % fixed length
+ Bytes2 = align(Bytes),
+ getoctets_as_list(Bytes2,Sv);
+ {Lb,Ub} ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ Sv when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ no ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+
+encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,Val);
+
+encode_restricted_string(aligned,Val) when list(Val)->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}].
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
+ encode_known_multiplier_string(aligned,StringType,C,false,Val);
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,Val) ->
+ Result = chars_encode(C,StringType,Val),
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} ->
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ 0 ->
+ [];
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ [align,Result];
+ {Ub,Lb} ->
+ [encode_length({Ub,Lb},length(Val)),align,Result];
+ Vl when list(Vl) ->
+ [encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
+ no ->
+ [encode_length(undefined,length(Val)),align,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+decode_known_multiplier_string(Bytes,aligned,StringType,C,_Ext) ->
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,C,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,C,Ub);
+ 0 ->
+ {[],Bytes};
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len)
+ end.
+
+
+encode_NumericString(C,Val) ->
+ encode_known_multiplier_string(aligned,'NumericString',C,false,Val).
+decode_NumericString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'NumericString',C,false).
+
+encode_PrintableString(C,Val) ->
+ encode_known_multiplier_string(aligned,'PrintableString',C,false,Val).
+decode_PrintableString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'PrintableString',C,false).
+
+encode_VisibleString(C,Val) -> % equivalent with ISO646String
+ encode_known_multiplier_string(aligned,'VisibleString',C,false,Val).
+decode_VisibleString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'VisibleString',C,false).
+
+encode_IA5String(C,Val) ->
+ encode_known_multiplier_string(aligned,'IA5String',C,false,Val).
+decode_IA5String(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'IA5String',C,false).
+
+encode_BMPString(C,Val) ->
+ encode_known_multiplier_string(aligned,'BMPString',C,false,Val).
+decode_BMPString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'BMPString',C,false).
+
+encode_UniversalString(C,Val) ->
+ encode_known_multiplier_string(aligned,'UniversalString',C,false,Val).
+decode_UniversalString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'UniversalString',C,false).
+
+%% end of known-multiplier strings for which PER visible constraints are
+%% applied
+
+encode_GeneralString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GeneralString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_GraphicString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GraphicString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_ObjectDescriptor(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_ObjectDescriptor(Bytes) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_TeletexString(_C,Val) -> % equivalent with T61String
+ encode_restricted_string(aligned,Val).
+decode_TeletexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_VideotexString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_VideotexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{0,0,O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
+ end.
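+
+%% For example, assuming getoctets_as_list/2 (defined elsewhere in this
+%% module) delivers the raw octets, the two BMP characters 16#0041 ('A')
+%% and 16#0102 should be returned as
+%%   [65, {0,0,1,2}]
+%% i.e. characters below 16#100 come back as plain integers and the rest
+%% as {0,0,High,Low} quadruples.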
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the PermittedAlphabet
+%% constraint into account.
+%% This function only encodes the value part and NOT the length
+
+chars_encode(C,StringType,Value) ->
+ case {StringType,get_constraint(C,'PermittedAlphabet')} of
+ {'UniversalString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+ _ ->
+ {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+ chars_encode2(Value,NumBits,CharOutTab)
+ end.
+
+chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,element(H-Min+1,Tab)}|chars_encode2(T,NumBits,{Min,Max,Tab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,((((((A bsl 8) + B) bsl 8) + C) bsl 8) + D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_,{_,_,_}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
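+
+%% For example, a NumericString with no PermittedAlphabet constraint uses
+%% the alphabet " 0123456789" and, per get_NumBits/2 below, 4 bits per
+%% character, so encoding a hypothetical value "19" should give
+%%   chars_encode([], 'NumericString', "19") -> [{bits,4,2},{bits,4,10}]
+%% since ' ' maps to 0, '0' to 1, ..., '9' to 10.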
+
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B > 2, B =< 4 -> 4;
+ B when B > 4, B =< 8 -> 8;
+ B when B > 8, B =< 16 -> 16;
+ B when B > 16, B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
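+
+%% For example, the 74-character PrintableString alphabet needs
+%% charbits(74) = 7 bits, which the aligned variant rounds up to
+%% charbits(74,aligned) = 8 bits per character.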
+
+
+chars_decode(Bytes,_,'BMPString',C,Len) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ no ->
+ getBMPChars(Bytes,Len);
+ _ ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet constraint"}}})
+ end;
+chars_decode(Bytes,NumBits,StringType,C,Len) ->
+ CharInTab = get_CharInTab(C,StringType),
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result = case minimum_octets(Char+Min) of
+ [NewChar] -> NewChar;
+ [C1,C2] -> {0,0,C1,C2};
+ [C1,C2,C3] -> {0,C1,C2,C3};
+ [C1,C2,C3,C4] -> {C1,C2,C3,C4}
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet are currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val);
+encode_null(_) -> []. % encodes to nothing
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
+encode_object_identifier({Name,Val}) when atom(Name) ->
+ encode_object_identifier(Val);
+encode_object_identifier(Val) ->
+ Octets = e_object_identifier(Val,notag),
+ [{debug,object_identifier},encode_length(undefined,length(Octets)),{octets,Octets}].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V},DoTag) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),list(V) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier(V,DoTag) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+
+% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail],_DoTag) when E1 =< 2 ->
+ Head = 40*E1 + E2, % weird
+ Res = e_object_elements([Head|Tail]),
+% dotag(DoTag,[6],elength(length(Res)+1),[Head|Res]),
+ Res.
+
+e_object_elements([]) ->
+ [];
+e_object_elements([H|T]) ->
+ lists:append(e_object_element(H),e_object_elements(T)).
+
+e_object_element(Num) when Num < 128 ->
+ [Num];
+% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
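+
+%% For example, the object identifier {1,3,6,1} is encoded with a leading
+%% subidentifier 40*1+3 = 43, giving the octets [43,6,1];
+%% decode_object_identifier/1 reverses this and, since 40 =< 43 < 80,
+%% reconstructs {1,3,6,1}.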
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list of bits and bytes and converts it to a list of bytes.
+%% Should be applied as the last step when encoding a complete ASN.1 type.
+%%
+complete(InList) when list(InList) ->
+ complete(InList,[],0);
+complete(InList) ->
+ complete([InList],[],0).
+
+complete([{debug,_}|T], Acc, Acclen) ->
+ complete(T,Acc,Acclen);
+complete([H|T],Acc,Acclen) when list(H) ->
+ complete(lists:concat([H,T]),Acc,Acclen);
+
+
+complete([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+ Newval = case N of
+ 1 ->
+ Val4 = Val band 16#FF,
+ [Val4];
+ 2 ->
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val3,Val4];
+ 3 ->
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val2,Val3,Val4];
+ 4 ->
+ Val1 = (Val bsr 24) band 16#FF,
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val1,Val2,Val3,Val4]
+ end,
+ complete([{octets,Newval}|T],Acc,Acclen);
+
+complete([{octets,Oct}|T],[],_Acclen) when list(Oct) ->
+ complete(T,lists:reverse(Oct),0);
+complete([{octets,Oct}|T],[Hacc|Tacc],Acclen) when list(Oct) ->
+ Rest = 8 - Acclen,
+ if
+ Rest == 8 ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc|Tacc]]),0);
+ true ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc bsl Rest|Tacc]]),0)
+ end;
+
+complete([{bit,Val}|T], Acc, Acclen) ->
+ complete([{bits,1,Val}|T],Acc,Acclen);
+complete([{octet,Val}|T], Acc, Acclen) ->
+ complete([{octets,1,Val}|T],Acc,Acclen);
+
+complete([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
+ complete(T,[Val|Acc],N);
+complete([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
+ Rest = 8 - Acclen,
+ if
+ Rest >= N ->
+ complete(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
+ true ->
+ Diff = N - Rest,
+ NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
+ Mask = element(Diff,{1,3,7,15,31,63,127,255}),
+ complete(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
+ end;
+complete([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
+ complete([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
+
+complete([align|T],Acc,0) ->
+ complete(T,Acc,0);
+complete([align|T],[Hacc|Tacc],Acclen) ->
+ Rest = 8 - Acclen,
+ complete(T,[Hacc bsl Rest|Tacc],0);
+complete([{octets,_N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
+ complete([{octets,Val}|T],Acc,Acclen);
+
+complete([],[],0) ->
+ [0]; % a complete encoding must always be at least 1 byte
+complete([],Acc,0) ->
+ lists:reverse(Acc);
+complete([],[Hacc|Tacc],Acclen) when Acclen > 0->
+ Rest = 8 - Acclen,
+ NewHacc = Hacc bsl Rest,
+ lists:reverse([NewHacc|Tacc]).
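+
+%% For example, three bits with value 5, an alignment step and one octet
+%% 16#AB should be flattened into two bytes:
+%%
+%%   complete([{bits,3,5}, align, {octets,[16#AB]}]).
+%%   %% -> [160,171]
+%%
+%% i.e. 2#10100000 (the three bits 101 padded to a full octet) followed by
+%% 16#AB.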
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_history.sgml b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_history.sgml
new file mode 100644
index 0000000000..f63b3360eb
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_history.sgml
@@ -0,0 +1,100 @@
+<!doctype chapter PUBLIC "-//Stork//DTD chapter//EN">
+<!--
+ ``The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved via the world wide web at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+ Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+ AB. All Rights Reserved.''
+
+ $Id: notes_history.sgml,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+-->
+<chapter>
+ <header>
+ <title>ASN1 Release Notes (Old)</title>
+ <prepared>Kenneth Lundin</prepared>
+ <responsible>Kenneth Lundin</responsible>
+ <docno></docno>
+ <approved>Kenneth Lundin</approved>
+ <checked>Kenneth Lundin</checked>
+ <date>98-02-02</date>
+ <rev>A</rev>
+ <file>notes_history.sgml</file>
+ </header>
+
+ <p>This document describes the changes made to old versions of the <c>asn1</c> application.
+
+ <section>
+ <title>ASN1 0.8.1</title>
+ <p>This is the first release of the ASN1 application. This version is released
+ for beta-testing. Some functionality will be added until the 1.0 version is
+ released. A list of missing features and restrictions can be found in the
+ chapter below.
+
+ <section>
+ <title>Missing features and other restrictions</title>
+ <list>
+ <item>
+      <p>The encoding rules BER and PER (aligned) are supported. <em>PER (unaligned)
+ IS NOT SUPPORTED</em>.
+ <item>
+ <p>NOT SUPPORTED types <c>ANY</c> and <c>ANY DEFINED BY</c>
+      (they are no longer in the standard).
+ <item>
+ <p>NOT SUPPORTED types <c>EXTERNAL</c> and <c>EMBEDDED-PDV</c>.
+ <item>
+ <p>NOT SUPPORTED type <c>REAL</c> (planned to be implemented).
+ <item>
+ <p>The code generation support for value definitions in the ASN.1 notation is very limited
+ (planned to be enhanced).
+ <item>
+ <p>The support for constraints is limited to:
+ <list>
+ <item><p>
+ SizeConstraint SIZE(X)
+ <item><p>
+ SingleValue (1)
+ <item><p>
+ ValueRange (X..Y)
+ <item><p>
+          PermittedAlphabet FROM (but not for BMPString and UniversalString when generating PER).
+      </list>
+      <p>Complex expressions in constraints are not supported (planned to be extended).
+ <item>
+ <p>The current version of the compiler has very limited error checking:
+ <list>
+ <item><p>Stops at first syntax error.
+      <item><p>Does not stop when a reference to an undefined type is found,
+         but prints an error message. Compilation of the generated
+         Erlang module will then fail.
+      <item><p>A number of other semantic checks are currently missing. This
+        means that the compiler will give little or no help in detecting what is wrong
+        with an ASN.1 specification, but will mostly work very well when the
+        ASN.1 specification is correct.
+ </list>
+ <item>
+ <p>The maximum INTEGER supported in this version is a signed 64 bit integer. This
+ limitation is probably quite reasonable. (Planned to be extended).
+ <item>
+ <p>Only AUTOMATIC TAGS supported for PER.
+ <item>
+ <p>Only EXPLICIT and IMPLICIT TAGS supported for BER.
+ <item>
+ <p>The compiler supports decoding of BER-data with indefinite length but it is
+ not possible to produce data with indefinite length with the encoder.
+ </list>
+ </section>
+
+ </section>
+</chapter>
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_latest.sgml b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_latest.sgml
new file mode 100644
index 0000000000..7accc797a6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/asn1/notes_latest.sgml
@@ -0,0 +1,100 @@
+<!doctype chapter PUBLIC "-//Stork//DTD chapter//EN">
+<!--
+ ``The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved via the world wide web at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+ Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+ AB. All Rights Reserved.''
+
+ $Id: notes_latest.sgml,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+-->
+<chapter>
+ <header>
+ <title>ASN1 Release Notes</title>
+ <prepared>Kenneth Lundin</prepared>
+ <responsible>Kenneth Lundin</responsible>
+ <docno></docno>
+ <approved>Kenneth Lundin</approved>
+ <checked>Kenneth Lundin</checked>
+ <date>97-10-07</date>
+ <rev>A</rev>
+ <file>notes_latest.sgml</file>
+ </header>
+
+ <p>This document describes the changes made to the asn1 application.
+
+ <section>
+ <title>ASN1 0.8.1</title>
+ <p>This is the first release of the ASN1 application. This version is released
+ for beta-testing. Some functionality will be added until the 1.0 version is
+ released. A list of missing features and restrictions can be found in the
+ chapter below.
+
+ <section>
+ <title>Missing features and other restrictions</title>
+ <list>
+ <item>
+      <p>The encoding rules BER and PER (aligned) are supported. <em>PER (unaligned)
+ IS NOT SUPPORTED</em>.
+ <item>
+ <p>NOT SUPPORTED types <c>ANY</c> and <c>ANY DEFINED BY</c>
+      (they are no longer in the standard).
+ <item>
+ <p>NOT SUPPORTED types <c>EXTERNAL</c> and <c>EMBEDDED-PDV</c>.
+ <item>
+ <p>NOT SUPPORTED type <c>REAL</c> (planned to be implemented).
+ <item>
+ <p>The code generation support for value definitions in the ASN.1 notation is very limited
+ (planned to be enhanced).
+ <item>
+ <p>The support for constraints is limited to:
+ <list>
+ <item><p>
+ SizeConstraint SIZE(X)
+ <item><p>
+ SingleValue (1)
+ <item><p>
+ ValueRange (X..Y)
+ <item><p>
+          PermittedAlphabet FROM (but not for BMPString and UniversalString when generating PER).
+      </list>
+      <p>Complex expressions in constraints are not supported (planned to be extended).
+ <item>
+ <p>The current version of the compiler has very limited error checking:
+ <list>
+ <item><p>Stops at first syntax error.
+      <item><p>Does not stop when a reference to an undefined type is found,
+         but prints an error message. Compilation of the generated
+         Erlang module will then fail.
+      <item><p>A number of other semantic checks are currently missing. This
+        means that the compiler will give little or no help in detecting what is wrong
+        with an ASN.1 specification, but will mostly work very well when the
+        ASN.1 specification is correct.
+ </list>
+ <item>
+ <p>The maximum INTEGER supported in this version is a signed 64 bit integer. This
+ limitation is probably quite reasonable. (Planned to be extended).
+ <item>
+ <p>Only AUTOMATIC TAGS supported for PER.
+ <item>
+ <p>Only EXPLICIT and IMPLICIT TAGS supported for BER.
+ <item>
+ <p>The compiler supports decoding of BER-data with indefinite length but it is
+ not possible to produce data with indefinite length with the encoder.
+ </list>
+ </section>
+
+ </section>
+</chapter>
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/Makefile b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/Makefile
new file mode 100644
index 0000000000..ab0d7c0a63
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/Makefile
@@ -0,0 +1,178 @@
+# ``The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved via the world wide web at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+# Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+# AB. All Rights Reserved.''
+#
+# $Id: Makefile,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+
+VSN = $(INETS_VSN)
+APP_VSN = "inets-$(VSN)"
+
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULES = \
+ ftp \
+ http \
+ http_lib \
+ httpc_handler \
+ httpc_manager \
+ uri \
+ httpd \
+ httpd_acceptor \
+ httpd_acceptor_sup \
+ httpd_conf \
+ httpd_example \
+ httpd_manager \
+ httpd_misc_sup \
+ httpd_parse \
+ httpd_request_handler \
+ httpd_response \
+ httpd_socket \
+ httpd_sup \
+ httpd_util \
+ httpd_verbosity \
+ inets_sup \
+ mod_actions \
+ mod_alias \
+ mod_auth \
+ mod_auth_plain \
+ mod_auth_dets \
+ mod_auth_mnesia \
+ mod_auth_server \
+ mod_browser \
+ mod_cgi \
+ mod_dir \
+ mod_disk_log \
+ mod_esi \
+ mod_get \
+ mod_head \
+ mod_htaccess \
+ mod_include \
+ mod_log \
+ mod_range \
+ mod_responsecontrol \
+ mod_trace \
+ mod_security \
+ mod_security_server
+
+HRL_FILES = httpd.hrl httpd_verbosity.hrl mod_auth.hrl \
+ http.hrl jnets_httpd.hrl
+
+ERL_FILES = $(MODULES:%=%.erl)
+
+TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
+
+APP_FILE= inets.app
+APPUP_FILE= inets.appup
+
+APP_SRC= $(APP_FILE).src
+APP_TARGET= $(EBIN)/$(APP_FILE)
+
+APPUP_SRC= $(APPUP_FILE).src
+APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
+
+# ----------------------------------------------------
+# INETS FLAGS
+# ----------------------------------------------------
+# DONT_USE_VERBOSITY = -Ddont_use_verbosity=true
+INETS_FLAGS = -D'SERVER_SOFTWARE="inets/$(VSN)"' \
+ -Ddefault_verbosity=silence \
+ $(DONT_USE_VERBOSITY)
+
+# INETS_DEBUG_DEFAULT = d
+ifeq ($(INETS_DEBUG),)
+ INETS_DEBUG = $(INETS_DEBUG_DEFAULT)
+endif
+
+ifeq ($(INETS_DEBUG),c)
+ INETS_FLAGS += -Dinets_cdebug -Dinets_debug -Dinets_log -Dinets_error
+endif
+ifeq ($(INETS_DEBUG),d)
+ INETS_FLAGS += -Dinets_debug -Dinets_log -Dinets_error
+endif
+ifeq ($(INETS_DEBUG),l)
+ INETS_FLAGS += -Dinets_log -Dinets_error
+endif
+ifeq ($(INETS_DEBUG),e)
+ INETS_FLAGS += -Dinets_error
+endif
+
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_FLAGS +=
+
+ifeq ($(WARN_UNUSED_VARS),true)
+ERL_COMPILE_FLAGS += +warn_unused_vars
+endif
+
+ERL_COMPILE_FLAGS += $(INETS_FLAGS) \
+ +'{parse_transform,sys_pre_attributes}' \
+ +'{attribute,insert,app_vsn,$(APP_VSN)}'
+
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt: $(TARGET_FILES)
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
+
+release_docs_spec:
+
+info:
+ @echo "INETS_DEBUG = $(INETS_DEBUG)"
+ @echo "INETS_FLAGS = $(INETS_FLAGS)"
+ @echo "ERL_COMPILE_FLAGS = $(ERL_COMPILE_FLAGS)"
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/ftp.erl
new file mode 100644
index 0000000000..be06ec654c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/ftp.erl
@@ -0,0 +1,1582 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: ftp.erl,v 1.2 2009/03/03 01:55:01 kostis Exp $
+%%
+-module(ftp).
+
+-behaviour(gen_server).
+
+%% This module implements an ftp client based on socket(3)/gen_tcp(3),
+%% file(3) and filename(3).
+%%
+
+
+-define(OPEN_TIMEOUT, 60*1000).
+-define(BYTE_TIMEOUT, 1000). % Timeout for _ONE_ byte to arrive. (ms)
+-define(OPER_TIMEOUT, 300). % Operation timeout (seconds)
+-define(FTP_PORT, 21).
+
+%% Client interface
+-export([cd/2, close/1, delete/2, formaterror/1, help/0,
+ lcd/2, lpwd/1, ls/1, ls/2,
+ mkdir/2, nlist/1, nlist/2,
+ open/1, open/2, open/3,
+ pwd/1,
+ recv/2, recv/3, recv_bin/2,
+ recv_chunk_start/2, recv_chunk/1,
+ rename/3, rmdir/2,
+ send/2, send/3, send_bin/3,
+ send_chunk_start/2, send_chunk/2, send_chunk_end/1,
+ type/2, user/3,user/4,account/2,
+ append/3, append/2, append_bin/3,
+ append_chunk/2, append_chunk_end/1, append_chunk_start/2]).
+
+%% Internal
+-export([init/1, handle_call/3, handle_cast/2,
+ handle_info/2, terminate/2,code_change/3]).
+
+
+%%
+%% CLIENT FUNCTIONS
+%%
+
+%% open(Host)
+%% open(Host, Flags)
+%%
+%% Purpose: Start an ftp client and connect to a host.
+%% Args: Host = string(),
+%% Port = integer(),
+%% Flags = [Flag],
+%% Flag = verbose | debug
+%% Returns: {ok, Pid} | {error, ehost}
+
+%%The only option was the host in textual form
+open({option_list,Option_list})->
+ %% Dbg = {debug,[trace,log,statistics]},
+ %% Options = [Dbg],
+ Options = [],
+ {ok,Pid1}=case lists:keysearch(flags,1,Option_list) of
+ {value,{flags,Flags}}->
+ {ok, Pid} = gen_server:start_link(?MODULE, [Flags], Options);
+ false ->
+ {ok, Pid} = gen_server:start_link(?MODULE, [], Options)
+ end,
+ gen_server:call(Pid1, {open, ip_comm,Option_list}, infinity);
+
+
+%%The only option was the tuple form of the ip-number
+open(Host)when tuple(Host) ->
+ open(Host, ?FTP_PORT, []);
+
+%%Host is the string form of the hostname
+open(Host)->
+ open(Host,?FTP_PORT,[]).
+
+
+
+open(Host, Port) when integer(Port) ->
+ open(Host,Port,[]);
+
+open(Host, Flags) when list(Flags) ->
+ open(Host,?FTP_PORT, Flags).
+
+open(Host,Port,Flags) when integer(Port), list(Flags) ->
+ %% Dbg = {debug,[trace,log,statistics]},
+ %% Options = [Dbg],
+ Options = [],
+ {ok, Pid} = gen_server:start_link(?MODULE, [Flags], Options),
+ gen_server:call(Pid, {open, ip_comm, Host, Port}, infinity).
+
+%% user(Pid, User, Pass)
+%% Purpose: Login.
+%% Args: Pid = pid(), User = Pass = string()
+%% Returns: ok | {error, euser} | {error, econn}
+user(Pid, User, Pass) ->
+ gen_server:call(Pid, {user, User, Pass}, infinity).
+
+%% user(Pid, User, Pass,Acc)
+%% Purpose: Login with a supplied account name
+%% Args: Pid = pid(), User = Pass = Acc = string()
+%% Returns: ok | {error, euser} | {error, econn} | {error, eacct}
+user(Pid, User, Pass,Acc) ->
+ gen_server:call(Pid, {user, User, Pass,Acc}, infinity).
+
+%% account(Pid,Acc)
+%% Purpose: Set a user Account.
+%% Args: Pid = pid(), Acc= string()
+%% Returns: ok | {error, eacct}
+account(Pid,Acc) ->
+ gen_server:call(Pid, {account,Acc}, infinity).
+
+%% pwd(Pid)
+%%
+%% Purpose: Get the current working directory at remote server.
+%% Args: Pid = pid()
+%% Returns: {ok, Dir} | {error, elogin} | {error, econn}
+pwd(Pid) ->
+ gen_server:call(Pid, pwd, infinity).
+
+%% lpwd(Pid)
+%%
+%% Purpose: Get the current working directory at local server.
+%% Args: Pid = pid()
+%% Returns: {ok, Dir} | {error, elogin}
+lpwd(Pid) ->
+ gen_server:call(Pid, lpwd, infinity).
+
+%% cd(Pid, Dir)
+%%
+%% Purpose: Change current working directory at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+cd(Pid, Dir) ->
+ gen_server:call(Pid, {cd, Dir}, infinity).
+
+%% lcd(Pid, Dir)
+%%
+%% Purpose: Change current working directory for the local client.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath}
+lcd(Pid, Dir) ->
+ gen_server:call(Pid, {lcd, Dir}, infinity).
+
+%% ls(Pid)
+%% ls(Pid, Dir)
+%%
+%% Purpose: List the contents of current directory (ls/1) or directory
+%% Dir (ls/2) at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: {ok, Listing} | {error, epath} | {error, elogin} | {error, econn}
+ls(Pid) ->
+ ls(Pid, "").
+ls(Pid, Dir) ->
+ gen_server:call(Pid, {dir, long, Dir}, infinity).
+
+%% nlist(Pid)
+%% nlist(Pid, Dir)
+%%
+%% Purpose: List the contents of current directory (nlist/1) or directory
+%% Dir (nlist/2) at remote server. The returned list is a stream
+%% of file names.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: {ok, Listing} | {error, epath} | {error, elogin} | {error, econn}
+nlist(Pid) ->
+ nlist(Pid, "").
+nlist(Pid, Dir) ->
+ gen_server:call(Pid, {dir, short, Dir}, infinity).
+
+%% rename(Pid, CurrFile, NewFile)
+%%
+%% Purpose: Rename a file at remote server.
+%% Args: Pid = pid(), CurrFile = NewFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+rename(Pid, CurrFile, NewFile) ->
+ gen_server:call(Pid, {rename, CurrFile, NewFile}, infinity).
+
+%% delete(Pid, File)
+%%
+%% Purpose: Remove file at remote server.
+%% Args: Pid = pid(), File = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+delete(Pid, File) ->
+ gen_server:call(Pid, {delete, File}, infinity).
+
+%% mkdir(Pid, Dir)
+%%
+%% Purpose: Make directory at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+mkdir(Pid, Dir) ->
+ gen_server:call(Pid, {mkdir, Dir}, infinity).
+
+%% rmdir(Pid, Dir)
+%%
+%% Purpose: Remove directory at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+rmdir(Pid, Dir) ->
+ gen_server:call(Pid, {rmdir, Dir}, infinity).
+
+%% type(Pid, Type)
+%%
+%% Purpose: Set transfer type.
+%% Args: Pid = pid(), Type = ascii | binary
+%% Returns: ok | {error, etype} | {error, elogin} | {error, econn}
+type(Pid, Type) ->
+ gen_server:call(Pid, {type, Type}, infinity).
+
+%% recv(Pid, RFile [, LFile])
+%%
+%% Purpose: Transfer file from remote server.
+%% Args: Pid = pid(), RFile = LFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+recv(Pid, RFile) ->
+ recv(Pid, RFile, "").
+
+recv(Pid, RFile, LFile) ->
+ gen_server:call(Pid, {recv, RFile, LFile}, infinity).
+
+%% recv_bin(Pid, RFile)
+%%
+%% Purpose: Transfer file from remote server into binary.
+%% Args: Pid = pid(), RFile = string()
+%% Returns: {ok, Bin} | {error, epath} | {error, elogin} | {error, econn}
+recv_bin(Pid, RFile) ->
+ gen_server:call(Pid, {recv_bin, RFile}, infinity).
+
+%% recv_chunk_start(Pid, RFile)
+%%
+%% Purpose: Start receive of chunks of remote file.
+%% Args: Pid = pid(), RFile = string().
+%% Returns: ok | {error, elogin} | {error, epath} | {error, econn}
+recv_chunk_start(Pid, RFile) ->
+ gen_server:call(Pid, {recv_chunk_start, RFile}, infinity).
+
+
+%% recv_chunk(Pid, RFile)
+%%
+%% Purpose: Transfer file from remote server into binary in chunks
+%% Args: Pid = pid(), RFile = string()
+%% Returns: Reference
+recv_chunk(Pid) ->
+ gen_server:call(Pid, recv_chunk, infinity).
+
+%% send(Pid, LFile [, RFile])
+%%
+%% Purpose: Transfer file to remote server.
+%% Args: Pid = pid(), LFile = RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+send(Pid, LFile) ->
+ send(Pid, LFile, "").
+
+send(Pid, LFile, RFile) ->
+ gen_server:call(Pid, {send, LFile, RFile}, infinity).
+
+%% send_bin(Pid, Bin, RFile)
+%%
+%% Purpose: Transfer a binary to a remote file.
+%% Args: Pid = pid(), Bin = binary(), RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, enotbinary}
+%% | {error, econn}
+send_bin(Pid, Bin, RFile) when binary(Bin) ->
+ gen_server:call(Pid, {send_bin, Bin, RFile}, infinity);
+send_bin(Pid, Bin, RFile) ->
+ {error, enotbinary}.
+
+%% send_chunk_start(Pid, RFile)
+%%
+%% Purpose: Start transfer of chunks to remote file.
+%% Args: Pid = pid(), RFile = string().
+%% Returns: ok | {error, elogin} | {error, epath} | {error, econn}
+send_chunk_start(Pid, RFile) ->
+ gen_server:call(Pid, {send_chunk_start, RFile}, infinity).
+
+
+%% append_chunk_start(Pid, RFile)
+%%
+%% Purpose: Start append chunks of data to remote file.
+%% Args: Pid = pid(), RFile = string().
+%% Returns: ok | {error, elogin} | {error, epath} | {error, econn}
+append_chunk_start(Pid, RFile) ->
+ gen_server:call(Pid, {append_chunk_start, RFile}, infinity).
+
+
+%% send_chunk(Pid, Bin)
+%%
+%% Purpose: Send chunk to remote file.
+%% Args: Pid = pid(), Bin = binary().
+%% Returns: ok | {error, elogin} | {error, enotbinary} | {error, echunk}
+%% | {error, econn}
+send_chunk(Pid, Bin) when binary(Bin) ->
+ gen_server:call(Pid, {send_chunk, Bin}, infinity);
+send_chunk(Pid, Bin) ->
+ {error, enotbinary}.
+
+%%append_chunk(Pid, Bin)
+%%
+%% Purpose: Append chunk to remote file.
+%% Args: Pid = pid(), Bin = binary().
+%% Returns: ok | {error, elogin} | {error, enotbinary} | {error, echunk}
+%% | {error, econn}
+append_chunk(Pid, Bin) when binary(Bin) ->
+ gen_server:call(Pid, {append_chunk, Bin}, infinity);
+append_chunk(Pid, Bin) ->
+ {error, enotbinary}.
+
+%% send_chunk_end(Pid)
+%%
+%% Purpose: End sending of chunks to remote file.
+%% Args: Pid = pid().
+%% Returns: ok | {error, elogin} | {error, echunk} | {error, econn}
+send_chunk_end(Pid) ->
+ gen_server:call(Pid, send_chunk_end, infinity).
+
+%% append_chunk_end(Pid)
+%%
+%% Purpose: End appending of chunks to remote file.
+%% Args: Pid = pid().
+%% Returns: ok | {error, elogin} | {error, echunk} | {error, econn}
+append_chunk_end(Pid) ->
+ gen_server:call(Pid, append_chunk_end, infinity).
+
+%% append(Pid, LFile,RFile)
+%%
+%% Purpose: Append the local file to the remote file
+%% Args: Pid = pid(), LFile = RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+append(Pid, LFile) ->
+ append(Pid, LFile, "").
+
+append(Pid, LFile, RFile) ->
+ gen_server:call(Pid, {append, LFile, RFile}, infinity).
+
+%% append_bin(Pid, Bin, RFile)
+%%
+%% Purpose: Append a binary to a remote file.
+%% Args: Pid = pid(), Bin = binary(), RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, enotbinary}
+%% | {error, econn}
+append_bin(Pid, Bin, RFile) when binary(Bin) ->
+ gen_server:call(Pid, {append_bin, Bin, RFile}, infinity);
+append_bin(Pid, Bin, RFile) ->
+ {error, enotbinary}.
+
+
+%% close(Pid)
+%%
+%% Purpose: End the ftp session.
+%% Args: Pid = pid()
+%% Returns: ok
+close(Pid) ->
+ case (catch gen_server:call(Pid, close, 30000)) of
+ ok ->
+ ok;
+ {'EXIT',{noproc,_}} ->
+ %% Already gone...
+ ok;
+ Res ->
+ Res
+ end.
+
+%% formaterror(Tag)
+%%
+%% Purpose: Return diagnostics.
+%% Args: Tag = atom() | {error, atom()}
+%% Returns: string().
+formaterror(Tag) ->
+ errstr(Tag).
+
+%% help()
+%%
+%% Purpose: Print list of valid commands.
+%%
+%% Undocumented.
+%%
+help() ->
+ io:format("\n Commands:\n"
+ " ---------\n"
+ " cd(Pid, Dir)\n"
+ " close(Pid)\n"
+ " delete(Pid, File)\n"
+ " formaterror(Tag)\n"
+ " help()\n"
+ " lcd(Pid, Dir)\n"
+ " lpwd(Pid)\n"
+ " ls(Pid [, Dir])\n"
+ " mkdir(Pid, Dir)\n"
+ " nlist(Pid [, Dir])\n"
+ " open(Host [Port, Flags])\n"
+ " pwd(Pid)\n"
+ " recv(Pid, RFile [, LFile])\n"
+ " recv_bin(Pid, RFile)\n"
+ " recv_chunk_start(Pid, RFile)\n"
+ " recv_chunk(Pid)\n"
+ " rename(Pid, CurrFile, NewFile)\n"
+ " rmdir(Pid, Dir)\n"
+ " send(Pid, LFile [, RFile])\n"
+ " send_chunk(Pid, Bin)\n"
+ " send_chunk_start(Pid, RFile)\n"
+ " send_chunk_end(Pid)\n"
+ " send_bin(Pid, Bin, RFile)\n"
+ " append(Pid, LFile [, RFile])\n"
+ " append_chunk(Pid, Bin)\n"
+ " append_chunk_start(Pid, RFile)\n"
+ " append_chunk_end(Pid)\n"
+ " append_bin(Pid, Bin, RFile)\n"
+ " type(Pid, Type)\n"
+ " account(Pid,Account)\n"
+ " user(Pid, User, Pass)\n"
+ " user(Pid, User, Pass,Account)\n").
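+
+%% A minimal example session (the host name and file name below are only
+%% illustrative):
+%%
+%%   {ok, Pid} = ftp:open("ftp.example.org"),
+%%   ok = ftp:user(Pid, "anonymous", "guest@"),
+%%   {ok, Dir} = ftp:pwd(Pid),
+%%   ok = ftp:recv(Pid, "README"),
+%%   ok = ftp:close(Pid).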
+
+%%
+%% INIT
+%%
+
+-record(state, {csock = undefined, dsock = undefined, flags = undefined,
+ ldir = undefined, type = undefined, chunk = false,
+ pending = undefined}).
+
+init([Flags]) ->
+ sock_start(),
+ put(debug,get_debug(Flags)),
+ put(verbose,get_verbose(Flags)),
+ process_flag(priority, low),
+ {ok, LDir} = file:get_cwd(),
+ {ok, #state{flags = Flags, ldir = LDir}}.
+
+%%
+%% HANDLERS
+%%
+
+%% First group of reply code digits
+-define(POS_PREL, 1).
+-define(POS_COMPL, 2).
+-define(POS_INTERM, 3).
+-define(TRANS_NEG_COMPL, 4).
+-define(PERM_NEG_COMPL, 5).
+
+%% Second group of reply code digits
+-define(SYNTAX,0).
+-define(INFORMATION,1).
+-define(CONNECTION,2).
+-define(AUTH_ACC,3).
+-define(UNSPEC,4).
+-define(FILE_SYSTEM,5).
+
+
+-define(STOP_RET(E),{stop, normal, {error, E},
+ State#state{csock = undefined}}).
+
+
+rescode(?POS_PREL,_,_) -> pos_prel; %%Positive Preliminary Reply
+rescode(?POS_COMPL,_,_) -> pos_compl; %%Positive Completion Reply
+rescode(?POS_INTERM,?AUTH_ACC,2) -> pos_interm_acct; %%Positive Intermediate Reply, need account
+rescode(?POS_INTERM,_,_) -> pos_interm; %%Positive Intermediate Reply
+rescode(?TRANS_NEG_COMPL,?FILE_SYSTEM,2) -> trans_no_space; %%No storage area no action taken
+rescode(?TRANS_NEG_COMPL,_,_) -> trans_neg_compl;%%Temporary Error, no action taken
+rescode(?PERM_NEG_COMPL,?FILE_SYSTEM,2) -> perm_no_space; %%Permanent disk space error, the user shall not try again
+rescode(?PERM_NEG_COMPL,?FILE_SYSTEM,3) -> perm_fname_not_allowed;
+rescode(?PERM_NEG_COMPL,_,_) -> perm_neg_compl.
+
+retcode(trans_no_space,_) -> etnospc;
+retcode(perm_no_space,_) -> epnospc;
+retcode(perm_fname_not_allowed,_) -> efnamena;
+retcode(_,Otherwise) -> Otherwise.
+
+handle_call({open,ip_comm,Conn_data},From,State) ->
+ case lists:keysearch(host,1,Conn_data) of
+ {value,{host,Host}}->
+ Port=get_key1(port,Conn_data,?FTP_PORT),
+ Timeout=get_key1(timeout,Conn_data,?OPEN_TIMEOUT),
+ open(Host,Port,Timeout,State);
+ false ->
+ ehost
+ end;
+
+handle_call({open,ip_comm,Host,Port},From,State) ->
+ open(Host,Port,?OPEN_TIMEOUT,State);
+
+handle_call({user, User, Pass}, _From, State) ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "USER ~s", [User]) of
+ pos_interm ->
+ case ctrl_cmd(CSock, "PASS ~s", [Pass]) of
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error,enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+
+handle_call({user, User, Pass,Acc}, _From, State) ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "USER ~s", [User]) of
+ pos_interm ->
+ case ctrl_cmd(CSock, "PASS ~s", [Pass]) of
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ pos_interm_acct->
+ case ctrl_cmd(CSock,"ACCT ~s",[Acc]) of
+ pos_compl->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error,enotconn}->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, eacct}, State}
+ end;
+ {error,enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+
+%%set_account(Acc,State)->Reply
+%%Reply={reply, {error, euser}, State} | {error,enotconn}->
+handle_call({account,Acc},_From,State)->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock,"ACCT ~s",[Acc]) of
+ pos_compl->
+ {reply, ok,State};
+ {error,enotconn}->
+ ?STOP_RET(econn);
+ Error ->
+ debug(" error: ~p",[Error]),
+ {reply, {error, eacct}, State}
+ end;
+
+handle_call(pwd, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ %%
+ %% NOTE: The directory string comes over the control connection.
+ case sock_write(CSock, mk_cmd("PWD", [])) of
+ ok ->
+ {_, Line} = result_line(CSock),
+ {_, Cs} = split($", Line), % XXX Ugly
+ {Dir0, _} = split($", Cs),
+ Dir = lists:delete($", Dir0),
+ {reply, {ok, Dir}, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn)
+ end;
+
+handle_call(lpwd, _From, State) ->
+ #state{csock = CSock, ldir = LDir} = State,
+ {reply, {ok, LDir}, State};
+
+handle_call({cd, Dir}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "CWD ~s", [Dir]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({lcd, Dir}, _From, State) ->
+ #state{csock = CSock, ldir = LDir0} = State,
+ LDir = absname(LDir0, Dir),
+ case file:read_file_info(LDir) of
+ {ok, _ } ->
+ {reply, ok, State#state{ldir = LDir}};
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({dir, Len, Dir}, _From, State) when State#state.chunk == false ->
+ debug(" dir : ~p: ~s~n",[Len,Dir]),
+ #state{csock = CSock, type = Type} = State,
+ set_type(ascii, Type, CSock),
+ LSock = listen_data(CSock, raw),
+ Cmd = case Len of
+ short -> "NLST";
+ long -> "LIST"
+ end,
+ Result = case Dir of
+ "" ->
+ ctrl_cmd(CSock, Cmd, "");
+ _ ->
+ ctrl_cmd(CSock, Cmd ++ " ~s", [Dir])
+ end,
+ debug(" ctrl : command result: ~p~n",[Result]),
+ case Result of
+ pos_prel ->
+ debug(" dbg : await the data connection", []),
+ DSock = accept_data(LSock),
+ debug(" dbg : await the data", []),
+ Reply0 =
+ case recv_data(DSock) of
+ {ok, DirData} ->
+ debug(" data : DirData: ~p~n",[DirData]),
+ case result(CSock) of
+ pos_compl ->
+ {ok, DirData};
+ _ ->
+ {error, epath}
+ end;
+ {error, Reason} ->
+ sock_close(DSock),
+ verbose(" data : error: ~p, ~p~n",[Reason, result(CSock)]),
+ {error, epath}
+ end,
+
+ debug(" ctrl : reply: ~p~n",[Reply0]),
+ reset_type(ascii, Type, CSock),
+ {reply, Reply0, State};
+ {closed, _Why} ->
+ ?STOP_RET(econn);
+ _ ->
+ sock_close(LSock),
+ {reply, {error, epath}, State}
+ end;
+
+
+handle_call({rename, CurrFile, NewFile}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "RNFR ~s", [CurrFile]) of
+ pos_interm ->
+ case ctrl_cmd(CSock, "RNTO ~s", [NewFile]) of
+ pos_compl ->
+ {reply, ok, State};
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({delete, File}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "DELE ~s", [File]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({mkdir, Dir}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "MKD ~s", [Dir]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({rmdir, Dir}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "RMD ~s", [Dir]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({type, Type}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case Type of
+ ascii ->
+ set_type(ascii, CSock),
+ {reply, ok, State#state{type = ascii}};
+ binary ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ _ ->
+ {reply, {error, etype}, State}
+ end;
+
+handle_call({recv, RFile, LFile}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock, ldir = LDir} = State,
+ ALFile = case LFile of
+ "" ->
+ absname(LDir, RFile);
+ _ ->
+ absname(LDir, LFile)
+ end,
+ case file_open(ALFile, write) of
+ {ok, Fd} ->
+ LSock = listen_data(CSock, binary),
+ Ret = case ctrl_cmd(CSock, "RETR ~s", [RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ recv_file(DSock, Fd),
+ Reply0 = case result(CSock) of
+ pos_compl ->
+ ok;
+ _ ->
+ {error, epath}
+ end,
+ sock_close(DSock),
+ {reply, Reply0, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end,
+ file_close(Fd),
+ Ret;
+ {error, _What} ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({recv_bin, RFile}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock, ldir = LDir} = State,
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "RETR ~s", [RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ Reply = recv_binary(DSock,CSock),
+ sock_close(DSock),
+ {reply, Reply, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+
+handle_call({recv_chunk_start, RFile}, _From, State)
+ when State#state.chunk == false ->
+ start_chunk_transfer("RETR",RFile,State);
+
+handle_call(recv_chunk, _From, State)
+ when State#state.chunk == true ->
+ do_recv_chunk(State);
+
+
+handle_call({send, LFile, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_file("STOR",LFile,RFile,State);
+
+handle_call({append, LFile, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_file("APPE",LFile,RFile,State);
+
+
+handle_call({send_bin, Bin, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_data("STOR",Bin,RFile,State);
+
+handle_call({append_bin, Bin, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_data("APPE",Bin,RFile,State);
+
+
+
+handle_call({send_chunk_start, RFile}, _From, State)
+ when State#state.chunk == false ->
+ start_chunk_transfer("STOR",RFile,State);
+
+handle_call({append_chunk_start,RFile},_From,State)
+ when State#state.chunk==false->
+ start_chunk_transfer("APPE",RFile,State);
+
+handle_call({send_chunk, Bin}, _From, State)
+ when State#state.chunk == true ->
+ chunk_transfer(Bin,State);
+
+handle_call({append_chunk, Bin}, _From, State)
+ when State#state.chunk == true ->
+ chunk_transfer(Bin,State);
+
+handle_call(append_chunk_end, _From, State)
+ when State#state.chunk == true ->
+ end_chunk_transfer(State);
+
+handle_call(send_chunk_end, _From, State)
+ when State#state.chunk == true ->
+ end_chunk_transfer(State);
+
+
+
+handle_call(close, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ ctrl_cmd(CSock, "QUIT", []),
+ sock_close(CSock),
+ {stop, normal, ok, State};
+
+handle_call(_, _From, State) when State#state.chunk == true ->
+ {reply, {error, echunk}, State}.
+
+
+handle_cast(Msg, State) ->
+ {noreply, State}.
+
+
+handle_info({Sock, {fromsocket, Bytes}}, State) when Sock == State#state.csock ->
+ put(leftovers, Bytes ++ leftovers()),
+ {noreply, State};
+
+%% Data connection closed (during chunk sending)
+handle_info({Sock, {socket_closed, _Reason}}, State) when Sock == State#state.dsock ->
+ {noreply, State#state{dsock = undefined}};
+
+%% Control connection closed.
+handle_info({Sock, {socket_closed, _Reason}}, State) when Sock == State#state.csock ->
+ debug(" sc : ~s~n",[leftovers()]),
+ {stop, ftp_server_close, State#state{csock = undefined}};
+
+handle_info(Info, State) ->
+ error_logger:info_msg("ftp : ~w : Unexpected message: ~w\n", [self(),Info]),
+ {noreply, State}.
+
+code_change(OldVsn,State,Extra)->
+ {ok,State}.
+
+terminate(Reason, State) ->
+ ok.
+%%
+%% OPEN CONNECTION
+%%
+open(Host,Port,Timeout,State)->
+ case sock_connect(Host,Port,Timeout) of
+ {error, What} ->
+ {stop, normal, {error, What}, State};
+ CSock ->
+ case result(CSock, State#state.flags) of
+ {error,Reason} ->
+ sock_close(CSock),
+ {stop,normal,{error,Reason},State};
+ _ -> % We should really check this...
+ {reply, {ok, self()}, State#state{csock = CSock}}
+ end
+ end.
+
+
+
+%%
+%% CONTROL CONNECTION
+%%
+
+ctrl_cmd(CSock, Fmt, Args) ->
+ Cmd = mk_cmd(Fmt, Args),
+ case sock_write(CSock, Cmd) of
+ ok ->
+ debug(" cmd : ~s",[Cmd]),
+ result(CSock);
+ {error, enotconn} ->
+ {error, enotconn};
+ Other ->
+ Other
+ end.
+
+mk_cmd(Fmt, Args) ->
+ [io_lib:format(Fmt, Args)| "\r\n"]. % Deep list ok.
+
+%%
+%% TRANSFER TYPE
+%%
+
+%%
+%% set_type(NewType, CurrType, CSock)
+%% reset_type(NewType, CurrType, CSock)
+%%
+set_type(Type, Type, CSock) ->
+ ok;
+set_type(NewType, _OldType, CSock) ->
+ set_type(NewType, CSock).
+
+reset_type(Type, Type, CSock) ->
+ ok;
+reset_type(_NewType, OldType, CSock) ->
+ set_type(OldType, CSock).
+
+set_type(ascii, CSock) ->
+ ctrl_cmd(CSock, "TYPE A", []);
+set_type(binary, CSock) ->
+ ctrl_cmd(CSock, "TYPE I", []).
+
+%%
+%% DATA CONNECTION
+%%
+
+%% Create a listen socket for a data connection and send a PORT command
+%% containing the IP address and port number. Mode is binary or raw.
+%%
+listen_data(CSock, Mode) ->
+ {IP, _} = sock_name(CSock), % IP address of control conn.
+ LSock = sock_listen(Mode, IP),
+ Port = sock_listen_port(LSock),
+ {A1, A2, A3, A4} = IP,
+ {P1, P2} = {Port div 256, Port rem 256},
+ ctrl_cmd(CSock, "PORT ~w,~w,~w,~w,~w,~w", [A1, A2, A3, A4, P1, P2]),
+ LSock.
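+
+%% A hypothetical illustration (added for clarity, not called anywhere) of
+%% the PORT argument encoding used above: the listen port is split into two
+%% bytes, so the assumed example address {127,0,0,1} with port 20000 is
+%% announced as "PORT 127,0,0,1,78,32".
+port_cmd_example() ->
+    {A1, A2, A3, A4} = {127, 0, 0, 1},
+    Port = 20000,
+    {P1, P2} = {Port div 256, Port rem 256},    % 78 and 32
+    lists:flatten(io_lib:format("PORT ~w,~w,~w,~w,~w,~w",
+                                [A1, A2, A3, A4, P1, P2])).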
+
+%%
+%% Accept the data connection and close the listen socket.
+%%
+accept_data(LSock) ->
+ Sock = sock_accept(LSock),
+ sock_close(LSock),
+ Sock.
+
+%%
+%% DATA COLLECTION (ls, dir)
+%%
+%% Socket is a byte stream in ASCII mode.
+%%
+
+%% Receive data (from data connection).
+recv_data(Sock) ->
+ recv_data(Sock, [], 0).
+recv_data(Sock, Sofar, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ {ok, lists:flatten(lists:reverse(Sofar))};
+recv_data(Sock, Sofar, Retry) ->
+ case sock_read(Sock) of
+ {ok, Data} ->
+ debug(" dbg : received some data: ~n~s", [Data]),
+ recv_data(Sock, [Data| Sofar], 0);
+ {error, timeout} ->
+ %% Retry..
+ recv_data(Sock, Sofar, Retry+1);
+ {error, Reason} ->
+ SoFar1 = lists:flatten(lists:reverse(Sofar)),
+ {error, {socket_error, Reason, SoFar1, Retry}};
+ {closed, _} ->
+ {ok, lists:flatten(lists:reverse(Sofar))}
+ end.
+
+%%
+%% BINARY TRANSFER
+%%
+
+%% --------------------------------------------------
+
+%% recv_binary(DSock,CSock) = {ok,Bin} | {error,Reason}
+%%
+recv_binary(DSock,CSock) ->
+ recv_binary1(recv_binary2(DSock,[],0),CSock).
+
+recv_binary1(Reply,Sock) ->
+ case result(Sock) of
+ pos_compl -> Reply;
+ _ -> {error, epath}
+ end.
+
+recv_binary2(Sock, _Bs, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ {error,eclosed};
+recv_binary2(Sock, Bs, Retry) ->
+ case sock_read(Sock) of
+ {ok, Bin} ->
+ recv_binary2(Sock, [Bs, Bin], 0);
+ {error, timeout} ->
+ recv_binary2(Sock, Bs, Retry+1);
+ {closed, _Why} ->
+ {ok,list_to_binary(Bs)}
+ end.
+
+%% --------------------------------------------------
+
+%%
+%% recv_chunk
+%%
+
+do_recv_chunk(#state{dsock = undefined} = State) ->
+ {reply, {error,econn}, State};
+do_recv_chunk(State) ->
+ recv_chunk1(recv_chunk2(State, 0), State).
+
+recv_chunk1({ok, _Bin} = Reply, State) ->
+ {reply, Reply, State};
+%% Reply = ok | {error, Reason}
+recv_chunk1(Reply, #state{csock = CSock} = State) ->
+ State1 = State#state{dsock = undefined, chunk = false},
+ case result(CSock) of
+ pos_compl ->
+ {reply, Reply, State1};
+ _ ->
+ {reply, {error, epath}, State1}
+ end.
+
+recv_chunk2(#state{dsock = DSock} = State, ?OPER_TIMEOUT) ->
+ sock_close(DSock),
+ {error, eclosed};
+recv_chunk2(#state{dsock = DSock} = State, Retry) ->
+ case sock_read(DSock) of
+ {ok, Bin} ->
+ {ok, Bin};
+ {error, timeout} ->
+ recv_chunk2(State, Retry+1);
+ {closed, Reason} ->
+ debug(" dbg : socket closed: ~p", [Reason]),
+ ok
+ end.
+
+
+%% --------------------------------------------------
+
+%%
+%% FILE TRANSFER
+%%
+
+recv_file(Sock, Fd) ->
+ recv_file(Sock, Fd, 0).
+
+recv_file(Sock, Fd, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ {closed, timeout};
+recv_file(Sock, Fd, Retry) ->
+ case sock_read(Sock) of
+ {ok, Bin} ->
+ file_write(Fd, Bin),
+ recv_file(Sock, Fd);
+ {error, timeout} ->
+ recv_file(Sock, Fd, Retry+1);
+% {error, Reason} ->
+% SoFar1 = lists:flatten(lists:reverse(Sofar)),
+% exit({socket_error, Reason, Sock, SoFar1, Retry});
+ {closed, How} ->
+ {closed, How}
+ end.
+
+%%
+%% send_file(Fd, Sock) = ok | {error, Why}
+%%
+
+send_file(Fd, Sock) ->
+ {N, Bin} = file_read(Fd),
+ if
+ N > 0 ->
+ case sock_write(Sock, Bin) of
+ ok ->
+ send_file(Fd, Sock);
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ true ->
+ ok
+ end.
+
+
+
+%%
+%% PARSING OF RESULT LINES
+%%
+
+%% Excerpt from RFC 959:
+%%
+%% "A reply is defined to contain the 3-digit code, followed by Space
+%% <SP>, followed by one line of text (where some maximum line length
+%% has been specified), and terminated by the Telnet end-of-line
+%% code. There will be cases however, where the text is longer than
+%% a single line. In these cases the complete text must be bracketed
+%% so the User-process knows when it may stop reading the reply (i.e.
+%% stop processing input on the control connection) and go do other
+%% things. This requires a special format on the first line to
+%% indicate that more than one line is coming, and another on the
+%% last line to designate it as the last. At least one of these must
+%% contain the appropriate reply code to indicate the state of the
+%% transaction. To satisfy all factions, it was decided that both
+%% the first and last line codes should be the same.
+%%
+%% Thus the format for multi-line replies is that the first line
+%% will begin with the exact required reply code, followed
+%% immediately by a Hyphen, "-" (also known as Minus), followed by
+%% text. The last line will begin with the same code, followed
+%% immediately by Space <SP>, optionally some text, and the Telnet
+%% end-of-line code.
+%%
+%% For example:
+%% 123-First line
+%% Second line
+%% 234 A line beginning with numbers
+%% 123 The last line
+%%
+%% The user-process then simply needs to search for the second
+%% occurrence of the same reply code, followed by <SP> (Space), at
+%% the beginning of a line, and ignore all intermediary lines. If
+%% an intermediary line begins with a 3-digit number, the Server
+%% must pad the front to avoid confusion.
+%%
+%% This scheme allows standard system routines to be used for
+%% reply information (such as for the STAT reply), with
+%% "artificial" first and last lines tacked on. In rare cases
+%% where these routines are able to generate three digits and a
+%% Space at the beginning of any line, the beginning of each
+%% text line should be offset by some neutral text, like Space.
+%%
+%% This scheme assumes that multi-line replies may not be nested."
+
+%% We have to collect the stream of result characters into lines (ending
+%% in "\r\n"; we check for "\n"). When a line is assembled, left-over
+%% characters are saved in the process dictionary.
+%%
+
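+%% A minimal hypothetical sketch (not called anywhere) of the multi-line
+%% detection described above: a first line such as "123-First line" makes
+%% result/2 call parse_to_end/2 with the prefix "123 " (code plus space),
+%% which then discards lines until one starts with that prefix.
+multiline_terminator_example() ->
+    [D1, D2, D3, $- | _] = "123-First line",
+    [D1, D2, D3, $ ].                           % "123 ", the terminator prefix
+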
+%% result(Sock) = rescode()
+%%
+result(Sock) ->
+ result(Sock, false).
+
+result_line(Sock) ->
+ result(Sock, true).
+
+%% result(Sock, Bool) = {error,Reason} | rescode() | {rescode(), Lines}
+%% Printout if Bool = true.
+%%
+result(Sock, RetForm) ->
+ case getline(Sock) of
+ Line when length(Line) > 3 ->
+ [D1, D2, D3| Tail] = Line,
+ case Tail of
+ [$-| _] ->
+ parse_to_end(Sock, [D1, D2, D3, $ ]); % 3 digits + space
+ _ ->
+ ok
+ end,
+ result(D1,D2,D3,Line,RetForm);
+ _ ->
+ retform(rescode(?PERM_NEG_COMPL,-1,-1),[],RetForm)
+ end.
+
+result(D1,_D2,_D3,Line,_RetForm) when D1 - $0 > 10 ->
+ {error,{invalid_server_response,Line}};
+result(D1,_D2,_D3,Line,_RetForm) when D1 - $0 < 0 ->
+ {error,{invalid_server_response,Line}};
+result(D1,D2,D3,Line,RetForm) ->
+ Res1 = D1 - $0,
+ Res2 = D2 - $0,
+ Res3 = D3 - $0,
+ verbose(" ~w : ~s", [Res1, Line]),
+ retform(rescode(Res1,Res2,Res3),Line,RetForm).
+
+retform(ResCode,Line,true) ->
+ {ResCode,Line};
+retform(ResCode,_,_) ->
+ ResCode.
+
+leftovers() ->
+ case get(leftovers) of
+ undefined -> [];
+ X -> X
+ end.
+
+%% getline(Sock) = Line
+%%
+getline(Sock) ->
+ getline(Sock, leftovers()).
+
+getline(Sock, Rest) ->
+ getline1(Sock, split($\n, Rest), 0).
+
+getline1(Sock, {[], Rest}, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ put(leftovers, Rest),
+ [];
+getline1(Sock, {[], Rest}, Retry) ->
+ case sock_read(Sock) of
+ {ok, More} ->
+ debug(" read : ~s~n",[More]),
+ getline(Sock, Rest ++ More);
+ {error, timeout} ->
+ %% Retry..
+ getline1(Sock, {[], Rest}, Retry+1);
+ Error ->
+ put(leftovers, Rest),
+ []
+ end;
+getline1(Sock, {Line, Rest}, Retry) ->
+ put(leftovers, Rest),
+ Line.
+
+parse_to_end(Sock, Prefix) ->
+ Line = getline(Sock),
+ case lists:prefix(Prefix, Line) of
+ false ->
+ parse_to_end(Sock, Prefix);
+ true ->
+ ok
+ end.
+
+
+%% Split the list after the first occurrence of S.
+%% Returns {Prefix, Suffix} ({[], Cs} if S not found).
+split(S, Cs) ->
+ split(S, Cs, []).
+
+split(S, [S| Cs], As) ->
+ {lists:reverse([S|As]), Cs};
+split(S, [C| Cs], As) ->
+ split(S, Cs, [C| As]);
+split(_, [], As) ->
+ {[], lists:reverse(As)}.
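+
+%% Hypothetical illustration (not called anywhere): splitting a raw chunk of
+%% control-connection input on the first newline keeps the newline in the
+%% prefix and leaves the remainder as leftovers for the next getline/2 call.
+split_example() ->
+    {"200 OK\r\n", "rest"} = split($\n, "200 OK\r\nrest"),
+    {[], "no newline yet"} = split($\n, "no newline yet"),
+    ok.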
+
+%%
+%% FILE INTERFACE
+%%
+%% All files are opened raw in binary mode.
+%%
+-define(BUFSIZE, 4096).
+
+file_open(File, Option) ->
+ file:open(File, [raw, binary, Option]).
+
+file_close(Fd) ->
+ file:close(Fd).
+
+
+file_read(Fd) -> % Compatible with pre R2A.
+ case file:read(Fd, ?BUFSIZE) of
+ {ok, {N, Bytes}} ->
+ {N, Bytes};
+ {ok, Bytes} ->
+ {size(Bytes), Bytes};
+ eof ->
+ {0, []}
+ end.
+
+file_write(Fd, Bytes) ->
+ file:write(Fd, Bytes).
+
+absname(Dir, File) -> % Args swapped.
+ filename:absname(File, Dir).
+
+
+
+%% sock_start()
+%%
+
+%%
+%% USE GEN_TCP
+%%
+
+sock_start() ->
+ inet_db:start().
+
+%%
+%% Connect to FTP server at Host (default is TCP port 21) in raw mode,
+%% in order to establish a control connection.
+%%
+
+sock_connect(Host,Port,TimeOut) ->
+ debug(" info : connect to server on ~p:~p~n",[Host,Port]),
+ Opts = [{packet, 0}, {active, false}],
+ case (catch gen_tcp:connect(Host, Port, Opts,TimeOut)) of
+ {'EXIT', R1} -> % XXX Probably no longer needed.
+            debug(" error: socket connection failed with exit reason:"
+                  "~n   ~p",[R1]),
+ {error, ehost};
+ {error, R2} ->
+            debug(" error: socket connection failed with error reason:"
+                  "~n   ~p",[R2]),
+ {error, ehost};
+ {ok, Sock} ->
+ Sock
+ end.
+
+%%
+%% Create a listen socket (any port) in binary or raw non-packet mode for
+%% data connection.
+%%
+sock_listen(Mode, IP) ->
+ Opts = case Mode of
+ binary ->
+ [binary, {packet, 0}];
+ raw ->
+ [{packet, 0}]
+ end,
+ {ok, Sock} = gen_tcp:listen(0, [{ip, IP}, {active, false} | Opts]),
+ Sock.
+
+sock_accept(LSock) ->
+ {ok, Sock} = gen_tcp:accept(LSock),
+ Sock.
+
+sock_close(undefined) ->
+ ok;
+sock_close(Sock) ->
+ gen_tcp:close(Sock).
+
+sock_read(Sock) ->
+ case gen_tcp:recv(Sock, 0, ?BYTE_TIMEOUT) of
+ {ok, Bytes} ->
+ {ok, Bytes};
+
+ {error, closed} ->
+ {closed, closed}; % Yes
+
+ %% --- OTP-4770 begin ---
+ %%
+ %% This seems to happen on windows
+ %% "Someone" tried to close an already closed socket...
+ %%
+
+ {error, enotsock} ->
+ {closed, enotsock};
+
+ %%
+ %% --- OTP-4770 end ---
+
+ {error, etimedout} ->
+ {error, timeout};
+
+ Other ->
+ Other
+ end.
+
+%% receive
+%% {tcp, Sock, Bytes} ->
+%% {ok, Bytes};
+%% {tcp_closed, Sock} ->
+%% {closed, closed}
+%% end.
+
+sock_write(Sock, Bytes) ->
+ gen_tcp:send(Sock, Bytes).
+
+sock_name(Sock) ->
+ {ok, {IP, Port}} = inet:sockname(Sock),
+ {IP, Port}.
+
+sock_listen_port(LSock) ->
+ {ok, Port} = inet:port(LSock),
+ Port.
+
+
+%%
+%% ERROR STRINGS
+%%
+errstr({error, Reason}) ->
+ errstr(Reason);
+
+errstr(echunk) -> "Synchronisation error during chunk sending.";
+errstr(eclosed) -> "Session has been closed.";
+errstr(econn) -> "Connection to remote server prematurely closed.";
+errstr(eexists) -> "File or directory already exists.";
+errstr(ehost) -> "Host not found, FTP server not found, "
+"or connection rejected.";
+errstr(elogin) -> "User not logged in.";
+errstr(enotbinary) -> "Term is not a binary.";
+errstr(epath) -> "No such file or directory, already exists, "
+"or permission denied.";
+errstr(etype) -> "No such type.";
+errstr(euser) -> "User name or password not valid.";
+errstr(etnospc) -> "Insufficient storage space in system.";
+errstr(epnospc) -> "Exceeded storage allocation "
+"(for current directory or dataset).";
+errstr(efnamena) -> "File name not allowed.";
+errstr(Reason) ->
+ lists:flatten(io_lib:format("Unknown error: ~w", [Reason])).
+
+
+
+%% ----------------------------------------------------------
+
+get_verbose(Params) -> check_param(verbose,Params).
+
+get_debug(Flags) -> check_param(debug,Flags).
+
+check_param(P,Ps) -> lists:member(P,Ps).
+
+
+%% verbose -> ok
+%%
+%% Prints the string if the Flags list is non-empty
+%%
+%% Params: F Format string
+%% A Arguments to the format string
+%%
+verbose(F,A) -> verbose(get(verbose),F,A).
+
+verbose(true,F,A) -> print(F,A);
+verbose(_,_F,_A) -> ok.
+
+
+
+
+%% debug -> ok
+%%
+%% Prints the string if debug is enabled
+%%
+%% Params: F Format string
+%% A Arguments to the format string
+%%
+debug(F,A) -> debug(get(debug),F,A).
+
+debug(true,F,A) -> print(F,A);
+debug(_,_F,_A) -> ok.
+
+
+print(F,A) -> io:format(F,A).
+
+
+
+transfer_file(Cmd,LFile,RFile,State)->
+ #state{csock = CSock, ldir = LDir} = State,
+ ARFile = case RFile of
+ "" ->
+ LFile;
+ _ ->
+ RFile
+ end,
+ ALFile = absname(LDir, LFile),
+ case file_open(ALFile, read) of
+ {ok, Fd} ->
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "~s ~s", [Cmd,ARFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ SFreply = send_file(Fd, DSock),
+ file_close(Fd),
+ sock_close(DSock),
+ case {SFreply,result(CSock)} of
+ {ok,pos_compl} ->
+ {reply, ok, State};
+ {ok,Other} ->
+ debug(" error: unknown reply: ~p~n",[Other]),
+ {reply, {error, epath}, State};
+ {{error,Why},Result} ->
+ ?STOP_RET(retcode(Result,econn))
+ end;
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ Other ->
+ debug(" error: ctrl failed: ~p~n",[Other]),
+ {reply, {error, epath}, State}
+ end;
+ {error, Reason} ->
+ debug(" error: file open: ~p~n",[Reason]),
+ {reply, {error, epath}, State}
+ end.
+
+transfer_data(Cmd,Bin,RFile,State)->
+ #state{csock = CSock, ldir = LDir} = State,
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "~s ~s", [Cmd,RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ SReply = sock_write(DSock, Bin),
+ sock_close(DSock),
+ case {SReply,result(CSock)} of
+ {ok,pos_compl} ->
+ {reply, ok, State};
+ {ok,trans_no_space} ->
+ ?STOP_RET(etnospc);
+ {ok,perm_no_space} ->
+ ?STOP_RET(epnospc);
+ {ok,perm_fname_not_allowed} ->
+ ?STOP_RET(efnamena);
+ {ok,Other} ->
+ debug(" error: unknown reply: ~p~n",[Other]),
+ {reply, {error, epath}, State};
+ {{error,Why},Result} ->
+ ?STOP_RET(retcode(Result,econn))
+ %% {{error,_Why},_Result} ->
+ %% ?STOP_RET(econn)
+ end;
+
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+
+ Other ->
+ debug(" error: ctrl failed: ~p~n",[Other]),
+ {reply, {error, epath}, State}
+ end.
+
+
+start_chunk_transfer(Cmd, RFile, #state{csock = CSock} = State) ->
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "~s ~s", [Cmd,RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ {reply, ok, State#state{dsock = DSock, chunk = true}};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ Otherwise ->
+ debug(" error: ctrl failed: ~p~n",[Otherwise]),
+ {reply, {error, epath}, State}
+ end.
+
+
+chunk_transfer(Bin,State)->
+ #state{dsock = DSock, csock = CSock} = State,
+ case DSock of
+ undefined ->
+ {reply,{error,econn},State};
+ _ ->
+ case sock_write(DSock, Bin) of
+ ok ->
+ {reply, ok, State};
+ Other ->
+ debug(" error: chunk write error: ~p~n",[Other]),
+ {reply, {error, econn}, State#state{dsock = undefined}}
+ end
+ end.
+
+
+
+end_chunk_transfer(State)->
+ #state{csock = CSock, dsock = DSock} = State,
+ case DSock of
+ undefined ->
+ Result = result(CSock),
+ case Result of
+ pos_compl ->
+ {reply,ok,State#state{dsock = undefined,
+ chunk = false}};
+ trans_no_space ->
+ ?STOP_RET(etnospc);
+ perm_no_space ->
+ ?STOP_RET(epnospc);
+ perm_fname_not_allowed ->
+ ?STOP_RET(efnamena);
+ Result ->
+ debug(" error: send chunk end (1): ~p~n",
+ [Result]),
+ {reply,{error,epath},State#state{dsock = undefined,
+ chunk = false}}
+ end;
+ _ ->
+ sock_close(DSock),
+ Result = result(CSock),
+ case Result of
+ pos_compl ->
+ {reply,ok,State#state{dsock = undefined,
+ chunk = false}};
+ trans_no_space ->
+ sock_close(CSock),
+ ?STOP_RET(etnospc);
+ perm_no_space ->
+ sock_close(CSock),
+ ?STOP_RET(epnospc);
+ perm_fname_not_allowed ->
+ sock_close(CSock),
+ ?STOP_RET(efnamena);
+ Result ->
+ debug(" error: send chunk end (2): ~p~n",
+ [Result]),
+ {reply,{error,epath},State#state{dsock = undefined,
+ chunk = false}}
+ end
+ end.
+
+get_key1(Key,List,Default)->
+ case lists:keysearch(Key,1,List)of
+ {value,{_,Val}}->
+ Val;
+ false->
+ Default
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.erl
new file mode 100644
index 0000000000..764e7fb092
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.erl
@@ -0,0 +1,260 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+%%% This version of the HTTP/1.1 client implements:
+%%% - RFC 2616 HTTP 1.1 client part
+%%% - RFC 2817 Upgrading to TLS Within HTTP/1.1 (not yet!)
+%%% - RFC 2818 HTTP Over TLS
+%%% - RFC 3229 Delta encoding in HTTP (not yet!)
+%%% - RFC 3230 Instance Digests in HTTP (not yet!)
+%%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!)
+%%% - HTTP/1.1 Specification Errata found at
+%%% http://world.std.com/~lawrence/http_errata.html
+%%% Additionally, it follows these recommendations:
+%%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!)
+%%% - draft-nottingham-hdrreg-http-00.txt (not yet!)
+%%%
+%%% Depends on
+%%% - uri.erl for all URL parsing (except what is handled by the C driver)
+%%% - http_lib.erl for all parsing of body and headers
+%%%
+%%% Supported Settings are:
+%%% http_timeout % (int) Milliseconds before a request times out
+%%% http_useproxy % (bool) True if a proxy should be used
+%%% http_proxy % (string) Proxy
+%%% http_noproxylist % (list) List with hosts not requiring proxy
+%%% http_autoredirect % (bool) True if automatic redirection on 30X responses
+%%% http_ssl % (list) SSL settings. A non-empty list enables SSL/TLS
+%%% support in the HTTP client
+%%% http_pipelinesize % (int) Length of pipeline. 1 means no pipeline.
+%%% Only has effect when initiating a new session.
+%%% http_sessions % (int) Max number of open sessions for {Addr,Port}
+%%%
+%%% TODO: (Known bugs!)
+%% - Cache handling
+%% - Doesn't handle a bunch of entity headers properly
+%% - Better handling of status codes different from 200,30X and 50X
+%% - Many of the settings above are not implemented!
+%% - close_session/2 and cancel_request/1 don't work
+%% - Variable pipe size.
+%% - Because inet_drv only has a single timer, the timeouts given for
+%%   pipelined requests are not ok (too long)
+%%
+%% Note:
+%% - Some servers (e.g. Microsoft-IIS/5.0) may sometimes not return a proper
+%% 'Location' header on a redirect.
+%% The client will fail with {error,no_scheme} in these cases.
+
+-module(http).
+-author("[email protected]").
+
+-export([start/0,
+ request/3,request/4,cancel_request/1,
+ request_sync/2,request_sync/3]).
+
+-include("http.hrl").
+-include("jnets_httpd.hrl").
+
+-define(START_OPTIONS,[]).
+
+%%% HTTP Client manager. Used to store open connections.
+%%% Will be started automatically unless started explicitly.
+start() ->
+ application:start(ssl),
+ httpc_manager:start().
+
+%%% Asynchronous HTTP request that spawns a handler.
+%%% Method HTTPReq
+%%% options,get,head,delete,trace = {Url,Headers}
+%%% post,put = {Url,Headers,ContentType,Body}
+%%% where Url is a {Scheme,Host,Port,PathQuery} tuple, as returned by uri.erl
+%%%
+%%% Returns: {ok,ReqId} |
+%%% {error,Reason}
+%%% If {ok,ReqId} was returned, the handler will reply with
+%%% gen_server:cast(From,{Ref,ReqId,{error,Reason}}) |
+%%% gen_server:cast(From,{Ref,ReqId,{Status,Headers,Body}})
+%%% where Reason is an atom and Headers a #res_headers{} record
+%%% http:format_error(Reason) gives a more informative description.
+%%%
+%%% Note:
+%%% - Always try to find an open connection to a given host and port, and use
+%%% the associated socket.
+%%% - Unless a 'Connection: close' header is provided don't close the socket
+%%% after a response is given
+%%% - A given Pid, found in the database, might be terminated before the
+%%% message is sent to the Pid. This will happen e.g., if the connection is
+%%% closed by the other party and there are no pending requests.
+%%% - The HTTP connection process is spawned, if necessary, in
+%%% httpc_manager:add_connection/4
+request(Ref,Method,HTTPReqCont) ->
+ request(Ref,Method,HTTPReqCont,[],self()).
+
+request(Ref,Method,HTTPReqCont,Settings) ->
+ request(Ref,Method,HTTPReqCont,Settings,self()).
+
+request(Ref,Method,{{Scheme,Host,Port,PathQuery},
+ Headers,ContentType,Body},Settings,From) ->
+ case create_settings(Settings,#client_settings{}) of
+ {error,Reason} ->
+ {error,Reason};
+ CS ->
+ case create_headers(Headers,#req_headers{}) of
+ {error,Reason} ->
+ {error,Reason};
+ H ->
+ Req=#request{ref=Ref,from=From,
+ scheme=Scheme,address={Host,Port},
+ pathquery=PathQuery,method=Method,
+ headers=H,content={ContentType,Body},
+ settings=CS},
+ httpc_manager:request(Req)
+ end
+ end;
+request(Ref,Method,{Url,Headers},Settings,From) ->
+ request(Ref,Method,{Url,Headers,[],[]},Settings,From).
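+
+%%% A hypothetical usage sketch (not exported, not called anywhere): issue an
+%%% asynchronous GET as described above and wait for the handler's cast.
+%%% The parsed URL tuple is an assumed example value, and start/0 must have
+%%% been called first so that httpc_manager is running.
+example_async_get() ->
+    Url = {http,"www.example.com",80,"/"},      % {Scheme,Host,Port,PathQuery}
+    Ref = make_ref(),
+    case request(Ref, get, {Url,[]}) of
+        {ok,ReqId} ->
+            receive
+                {'$gen_cast',{Ref,ReqId,Reply}} -> Reply
+            after ?HTTP_REQUEST_TIMEOUT -> {error,timeout}
+            end;
+        {error,Reason} ->
+            {error,Reason}
+    end.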
+
+%%% Cancels requests identified with ReqId.
+%%% FIXME! Doesn't work...
+cancel_request(ReqId) ->
+ httpc_manager:cancel_request(ReqId).
+
+%%% Close all sessions currently open to Host:Port
+%%% FIXME! Doesn't work...
+close_session(Host,Port) ->
+ httpc_manager:close_session(Host,Port).
+
+
+%%% Synchronous HTTP request that waits until a response is created
+%%% (e.g. a successful reply or a timeout)
+%%% Method HTTPReq
+%%% options,get,head,delete,trace = {Url,Headers}
+%%% post,put = {Url,Headers,ContentType,Body}
+%%% where Url is a string() or a {Scheme,Host,Port,PathQuery} tuple
+%%%
+%%% Returns: {Status,Headers,Body} |
+%%% {error,Reason}
+%%% where Reason is an atom.
+%%% http:format_error(Reason) gives a more informative description.
+request_sync(Method,HTTPReqCont) ->
+ request_sync(Method,HTTPReqCont,[]).
+
+request_sync(Method,{Url,Headers},Settings)
+ when Method==options;Method==get;Method==head;Method==delete;Method==trace ->
+ case uri:parse(Url) of
+ {error,Reason} ->
+ {error,Reason};
+ ParsedUrl ->
+ request_sync(Method,{ParsedUrl,Headers,[],[]},Settings,0)
+ end;
+request_sync(Method,{Url,Headers,ContentType,Body},Settings)
+ when Method==post;Method==put ->
+ case uri:parse(Url) of
+ {error,Reason} ->
+ {error,Reason};
+ ParsedUrl ->
+ request_sync(Method,{ParsedUrl,Headers,ContentType,Body},Settings,0)
+ end;
+request_sync(Method,Request,Settings) ->
+ {error,bad_request}.
+
+request_sync(Method,HTTPCont,Settings,_Redirects) ->
+ case request(request_sync,Method,HTTPCont,Settings,self()) of
+ {ok,_ReqId} ->
+ receive
+ {'$gen_cast',{request_sync,_ReqId2,{Status,Headers,Body}}} ->
+ {Status,pp_headers(Headers),binary_to_list(Body)};
+ {'$gen_cast',{request_sync,_ReqId2,{error,Reason}}} ->
+ {error,Reason};
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+
+create_settings([],Out) ->
+ Out;
+create_settings([{http_timeout,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{timeout=Val});
+create_settings([{http_useproxy,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{useproxy=Val});
+create_settings([{http_proxy,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{proxy=Val});
+create_settings([{http_noproxylist,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{noproxylist=Val});
+create_settings([{http_autoredirect,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{autoredirect=Val});
+create_settings([{http_ssl,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{ssl=Val});
+create_settings([{http_pipelinesize,Val}|Settings],Out)
+ when integer(Val),Val>0 ->
+ create_settings(Settings,Out#client_settings{max_quelength=Val});
+create_settings([{http_sessions,Val}|Settings],Out)
+ when integer(Val),Val>0 ->
+ create_settings(Settings,Out#client_settings{max_sessions=Val});
+create_settings([{Key,_Val}|_Settings],_Out) ->
+ io:format("ERROR bad settings, got ~p~n",[Key]),
+ {error,bad_settings}.
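+
+%% A hypothetical illustration (not called anywhere) of the Settings list
+%% documented at the top of this module, folded into a #client_settings{}
+%% record by create_settings/2. The values are assumed examples.
+settings_example() ->
+    create_settings([{http_timeout, 10000},
+                     {http_useproxy, false},
+                     {http_pipelinesize, 1}],
+                    #client_settings{}).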
+
+
+create_headers([],Req) ->
+ Req;
+create_headers([{Key,Val}|Rest],Req) ->
+ case httpd_util:to_lower(Key) of
+ "expect" ->
+ create_headers(Rest,Req#req_headers{expect=Val});
+ OtherKey ->
+ create_headers(Rest,
+ Req#req_headers{other=[{OtherKey,Val}|
+ Req#req_headers.other]})
+ end.
+
+
+pp_headers(#res_headers{connection=Connection,
+ transfer_encoding=Transfer_encoding,
+ retry_after=Retry_after,
+ content_length=Content_length,
+ content_type=Content_type,
+ location=Location,
+ other=Other}) ->
+ H1=case Connection of
+ undefined -> [];
+ _ -> [{'Connection',Connection}]
+ end,
+ H2=case Transfer_encoding of
+ undefined -> [];
+ _ -> [{'Transfer-Encoding',Transfer_encoding}]
+ end,
+ H3=case Retry_after of
+ undefined -> [];
+ _ -> [{'Retry-After',Retry_after}]
+ end,
+ H4=case Location of
+ undefined -> [];
+ _ -> [{'Location',Location}]
+ end,
+ HCL=case Content_length of
+ "0" -> [];
+ _ -> [{'Content-Length',Content_length}]
+ end,
+ HCT=case Content_type of
+ undefined -> [];
+ _ -> [{'Content-Type',Content_type}]
+ end,
+ H1++H2++H3++H4++HCL++HCT++Other.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.hrl
new file mode 100644
index 0000000000..f10ca47a9a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http.hrl
@@ -0,0 +1,127 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+-define(HTTP_REQUEST_TIMEOUT, 5000).
+-define(PIPELINE_LENGTH,3).
+-define(OPEN_SESSIONS,400).
+
+
+%%% FIXME! These definitions should probably be possible to defined via
+%%% user settings
+-define(MAX_REDIRECTS, 4).
+
+
+%%% Note that if not persistent the connection can be closed immediately on a
+%%% response, because new requests are not sent to this connection process.
+%%% address, % ({Host,Port}) Destination Host and Port
+-record(session,{
+ id, % (int) Session Id identifies session in http_manager
+ clientclose, % (bool) true if client requested "close" connection
+ scheme, % (atom) http (HTTP/TCP) or https (TCP/SSL/TCP)
+ socket, % (socket) Open socket, used by connection
+ pipeline=[], % (list) Sent requests, not yet taken care of by the
+ % associated http_responder.
+ quelength=1, % (int) Current length of pipeline (1 when created)
+ max_quelength% (int) Max pipeline length
+ }).
+
+%%% [{Pid,RequestQue,QueLength},...] list where
+%%% - RequestQue (implemented with a list) contains sent requests that
+%%%   have not yet received a response (pipelined) AND are not currently
+%%%   handled (awaiting data) by the session process.
+%%% - QueLength is the length of this queue.
+
+%%% Response headers
+-record(res_headers,{
+%%% --- Standard "General" headers
+% cache_control,
+ connection,
+% date,
+% pragma,
+% trailer,
+ transfer_encoding,
+% upgrade,
+% via,
+% warning,
+%%% --- Standard "Request" headers
+% accept_ranges,
+% age,
+% etag,
+ location,
+% proxy_authenticate,
+ retry_after,
+% server,
+% vary,
+% www_authenticate,
+%%% --- Standard "Entity" headers
+% allow,
+% content_encoding,
+% content_language,
+ content_length="0",
+% content_location,
+% content_md5,
+% content_range,
+ content_type,
+% expires,
+% last_modified,
+ other=[] % (list) Key/Value list with other headers
+ }).
+
+%%% All data associated to a specific HTTP request
+-record(request,{
+ id, % (int) Request Id
+ ref, % Caller specific
+ from, % (pid) Caller
+ redircount=0,% (int) Number of redirects made for this request
+ scheme, % (http|https) (HTTP/TCP) or (TCP/SSL/TCP) connection
+ address, % ({Host,Port}) Destination Host and Port
+ pathquery, % (string) Rest of parsed URL
+ method, % (atom) HTTP request Method
+ headers, % (list) Key/Value list with Headers
+ content, % ({ContentType,Body}) Current HTTP request
+ settings % (#client_settings{}) User defined settings
+ }).
+
+-record(response,{
+ scheme, % (atom) http (HTTP/TCP) or https (TCP/SSL/TCP)
+ socket, % (socket) Open socket, used by connection
+ status,
+ http_version,
+ headers=#res_headers{},
+ body = <<>>
+ }).
+
+
+
+
+%%% HTTP Client settings
+-record(client_settings,{
+ timeout=?HTTP_REQUEST_TIMEOUT,
+ % (int) Milliseconds before a request times out
+ useproxy=false, % (bool) True if the proxy should be used
+ proxy=undefined, % (tuple) Parsed Proxy URL
+ noproxylist=[], % (list) List with hosts not requiring proxy
+ autoredirect=true, % (bool) True if automatic redirection on 30X
+ % responses.
+ max_sessions=?OPEN_SESSIONS,% (int) Max open sessions for any Adr,Port
+ max_quelength=?PIPELINE_LENGTH, % (int) Max pipeline length
+% ssl=[{certfile,"/jb/server_root/ssl/ssl_client.pem"},
+% {keyfile,"/jb/server_root/ssl/ssl_client.pem"},
+% {verify,0}]
+ ssl=false % (list) SSL settings. A non-empty list enables SSL/TLS
+ % support in the HTTP client
+ }).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http_lib.erl
new file mode 100644
index 0000000000..eb8d7d66b1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/http_lib.erl
@@ -0,0 +1,745 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+%%% File : http_lib.erl
+%%% Author : Johan Blom <[email protected]>
+%%% Description : Generic, HTTP specific helper functions
+%%% Created : 4 Mar 2002 by Johan Blom
+
+%%% TODO
+%%% - Check if I need to do anything special when parsing
+%%% "Content-Type:multipart/form-data"
+
+-module(http_lib).
+-author("[email protected]").
+
+-include("http.hrl").
+-include("jnets_httpd.hrl").
+
+-export([connection_close/1,
+ accept/3,deliver/3,recv/4,recv0/3,
+ connect/1,send/3,close/2,controlling_process/3,setopts/3,
+ getParameterValue/2,
+% get_var/2,
+ create_request_line/3]).
+
+-export([read_client_headers/2,read_server_headers/2,
+ get_auth_data/1,create_header_list/1,
+ read_client_body/2,read_client_multipartrange_body/3,
+ read_server_body/2]).
+
+
+%%% Server response:
+%%% Check "Connection" header if server requests session to be closed.
+%%% No 'close' means returns false
+%%% Client Request:
+%%% Check if 'close' in request headers
+%%% Only care about HTTP 1.1 clients!
+connection_close(Headers) when record(Headers,req_headers) ->
+ case Headers#req_headers.connection of
+ "close" ->
+ true;
+ "keep-alive" ->
+ false;
+ Value when list(Value) ->
+ true;
+ _ ->
+ false
+ end;
+connection_close(Headers) when record(Headers,res_headers) ->
+ case Headers#res_headers.connection of
+ "close" ->
+ true;
+ "keep-alive" ->
+ false;
+ Value when list(Value) ->
+ true;
+ _ ->
+ false
+ end.
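+
+%% Hypothetical illustration (not called anywhere) of the decision above for
+%% a client checking a server response:
+connection_close_example() ->
+    true  = connection_close(#res_headers{connection = "close"}),
+    false = connection_close(#res_headers{connection = "keep-alive"}),
+    false = connection_close(#res_headers{}),   % no Connection header seen
+    ok.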
+
+
+%% =============================================================================
+%%% Debugging:
+
+% format_time(TS) ->
+% {_,_,MicroSecs}=TS,
+% {{Y,Mon,D},{H,M,S}}=calendar:now_to_universal_time(TS),
+% lists:flatten(io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w,~2.2.0w:~2.2.0w:~6.3.0f",
+% [Y,Mon,D,H,M,S+(MicroSecs/1000000)])).
+
+%% Time in milli seconds
+% t() ->
+% {A,B,C} = erlang:now(),
+% A*1000000000+B*1000+(C div 1000).
+
+% sz(L) when list(L) ->
+% length(L);
+% sz(B) when binary(B) ->
+% size(B);
+% sz(O) ->
+% {unknown_size,O}.
+
+
+%% =============================================================================
+
+getHeaderValue(_Attr,[]) ->
+ [];
+getHeaderValue(Attr,[{Attr,Value}|_Rest]) ->
+ Value;
+getHeaderValue(Attr,[_|Rest]) ->
+ getHeaderValue(Attr,Rest).
+
+getParameterValue(_Attr,undefined) ->
+ undefined;
+getParameterValue(Attr,List) ->
+ case lists:keysearch(Attr,1,List) of
+ {value,{Attr,Val}} ->
+ Val;
+ _ ->
+ undefined
+ end.
+
+create_request_line(Method,Path,{Major,Minor}) ->
+ [atom_to_list(Method)," ",Path,
+ " HTTP/",integer_to_list(Major),".",integer_to_list(Minor)];
+create_request_line(Method,Path,Minor) ->
+ [atom_to_list(Method)," ",Path," HTTP/1.",integer_to_list(Minor)].
+
+
+%%% ============================================================================
+read_client_headers(Info,Timeout) ->
+ Headers=read_response_h(Info#response.scheme,Info#response.socket,Timeout,
+ Info#response.headers),
+ Info#response{headers=Headers}.
+
+read_server_headers(Info,Timeout) ->
+ Headers=read_request_h(Info#mod.socket_type,Info#mod.socket,Timeout,
+ Info#mod.headers),
+ Info#mod{headers=Headers}.
+
+
+%% Parses the header of a HTTP request and returns a key,value tuple
+%% list containing Name and Value of each header directive as of:
+%%
+%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
+%%
+%% But in HTTP/1.1 the field names are case insensitive, so it must be
+%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
+%% The standard furthermore says that leading and trailing white space
+%% is not part of the field value and shall therefore be removed.
+read_request_h(SType,S,Timeout,H) ->
+ case recv0(SType,S,Timeout) of
+ {ok,{http_header,_,'Connection',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{connection=Value});
+ {ok,{http_header,_,'Content-Type',_,Val}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{content_type=Val});
+ {ok,{http_header,_,'Host',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{host=Value});
+ {ok,{http_header,_,'Content-Length',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{content_length=Value});
+% {ok,{http_header,_,'Expect',_,Value}} -> % FIXME! Update inet_drv.c!!
+% read_request_h(SType,S,Timeout,H#req_headers{expect=Value});
+ {ok,{http_header,_,'Transfer-Encoding',_,V}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{transfer_encoding=V});
+ {ok,{http_header,_,'Authorization',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{authorization=Value});
+ {ok,{http_header,_,'User-Agent',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{user_agent=Value});
+ {ok,{http_header,_,'Range',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{range=Value});
+ {ok,{http_header,_,'If-Range',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_range=Value});
+ {ok,{http_header,_,'If-Match',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_match=Value});
+ {ok,{http_header,_,'If-None-Match',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_none_match=Value});
+ {ok,{http_header,_,'If-Modified-Since',_,V}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_modified_since=V});
+ {ok,{http_header,_,'If-Unmodified-Since',_,V}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_unmodified_since=V});
+ {ok,{http_header,_,K,_,V}} ->
+ read_request_h(SType,S,Timeout,
+ H#req_headers{other=H#req_headers.other++[{K,V}]});
+ {ok,http_eoh} ->
+ H;
+ {error, timeout} when SType==http ->
+ throw({error, session_local_timeout});
+ {error, etimedout} when SType==https ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+ throw({error,Reason})
+ end.
+
+
+read_response_h(SType,S,Timeout,H) ->
+ case recv0(SType,S,Timeout) of
+ {ok,{http_header,_,'Connection',_,Val}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{connection=Val});
+ {ok,{http_header,_,'Content-Length',_,Val}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{content_length=Val});
+ {ok,{http_header,_,'Content-Type',_,Val}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{content_type=Val});
+ {ok,{http_header,_,'Transfer-Encoding',_,V}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{transfer_encoding=V});
+ {ok,{http_header,_,'Location',_,V}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{location=V});
+ {ok,{http_header,_,'Retry-After',_,V}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{retry_after=V});
+ {ok,{http_header,_,K,_,V}} ->
+ read_response_h(SType,S,Timeout,
+ H#res_headers{other=H#res_headers.other++[{K,V}]});
+ {ok,http_eoh} ->
+ H;
+ {error, timeout} when SType==http ->
+ throw({error, session_local_timeout});
+ {error, etimedout} when SType==https ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+ throw({error,Reason})
+ end.
+
+
+%%% Got the headers, and maybe a part of the body, now read in the rest
+%%% Note:
+%%% - No need to check for Expect header if client
+%%% - Currently no support for setting MaxHeaderSize in client, set to
+%%% unlimited.
+%%% - Move to raw packet mode as we are finished with HTTP parsing
+read_client_body(Info,Timeout) ->
+ Headers=Info#response.headers,
+ case Headers#res_headers.transfer_encoding of
+ "chunked" ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Chunked Data:",[]),
+ read_client_chunked_body(Info,Timeout,?MAXBODYSIZE);
+ Encoding when list(Encoding) ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Unknown",[]),
+ throw({error,unknown_coding});
+ _ ->
+ ContLen=list_to_integer(Headers#res_headers.content_length),
+ if
+ ContLen>?MAXBODYSIZE ->
+ throw({error,body_too_big});
+ true ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:none ",[]),
+ Info#response{body=read_plain_body(Info#response.scheme,
+ Info#response.socket,
+ ContLen,
+ Info#response.body,
+ Timeout)}
+ end
+ end.
+
+
+%%% ----------------------------------------------------------------------
+read_server_body(Info,Timeout) ->
+ MaxBodySz=httpd_util:lookup(Info#mod.config_db,max_body_size,?MAXBODYSIZE),
+ ContLen=list_to_integer((Info#mod.headers)#req_headers.content_length),
+ %% ?vtrace("ContentLength: ~p", [ContLen]),
+ if
+ integer(ContLen),integer(MaxBodySz),ContLen>MaxBodySz ->
+ throw({error,body_too_big});
+ true ->
+ read_server_body2(Info,Timeout,ContLen,MaxBodySz)
+ end.
+
+
+%%----------------------------------------------------------------------
+%% Control if the body is transfer encoded, if so decode it.
+%% Note:
+%% - MaxBodySz has an integer value or 'nolimit'
+%% - ContLen has an integer value or 'undefined'
+%% All applications MUST be able to receive and decode the "chunked"
+%% transfer-coding, see RFC 2616 Section 3.6.1
+read_server_body2(Info,Timeout,ContLen,MaxBodySz) ->
+ ?DEBUG("read_entity_body2()->Max: ~p ~nLength:~p ~nSocket: ~p ~n",
+ [MaxBodySz,ContLen,Info#mod.socket]),
+ case (Info#mod.headers)#req_headers.transfer_encoding of
+ "chunked" ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Chunked Data:",[]),
+ read_server_chunked_body(Info,Timeout,MaxBodySz);
+ Encoding when list(Encoding) ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Unknown",[]),
+ httpd_response:send_status(Info,501,"Unknown Transfer-Encoding"),
+ http_lib:close(Info#mod.socket_type,Info#mod.socket),
+ throw({error,{status_sent,"Unknown Transfer-Encoding "++Encoding}});
+ _ when integer(ContLen),integer(MaxBodySz),ContLen>MaxBodySz ->
+ throw({error,body_too_big});
+ _ when integer(ContLen) ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:none ",[]),
+ Info#mod{entity_body=read_plain_body(Info#mod.socket_type,
+ Info#mod.socket,
+ ContLen,Info#mod.entity_body,
+ Timeout)}
+ end.
+
+
+%%% ----------------------------------------------------------------------------
+%%% The body was plain, just read it from the socket.
+read_plain_body(_SocketType,Socket,0,Cont,_Timeout) ->
+ Cont;
+read_plain_body(SocketType,Socket,ContLen,Cont,Timeout) ->
+ Body=read_more_data(SocketType,Socket,ContLen,Timeout),
+ <<Cont/binary,Body/binary>>.
+
+%%% ----------------------------------------------------------------------------
+%%% The body was chunked, decode it.
+%%% From RFC2616, Section 3.6.1
+%% Chunked-Body = *chunk
+%% last-chunk
+%% trailer
+%% CRLF
+%%
+%% chunk = chunk-size [ chunk-extension ] CRLF
+%% chunk-data CRLF
+%% chunk-size = 1*HEX
+%% last-chunk = 1*("0") [ chunk-extension ] CRLF
+%%
+%% chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+%% chunk-ext-name = token
+%% chunk-ext-val = token | quoted-string
+%% chunk-data = chunk-size(OCTET)
+%% trailer = *(entity-header CRLF)
+%%
+%%% "All applications MUST ignore chunk-extension extensions they do not
+%%% understand.", see RFC 2616 Section 3.6.1
+%%% We don't understand any extension...
+read_client_chunked_body(Info,Timeout,MaxChunkSz) ->
+ case read_chunk(Info#response.scheme,Info#response.socket,
+ Timeout,0,MaxChunkSz) of
+ {last_chunk,_ExtensionList} -> % Ignore extension
+ TrailH=read_headers_old(Info#response.scheme,Info#response.socket,
+ Timeout),
+ H=Info#response.headers,
+ OtherHeaders=H#res_headers.other++TrailH,
+ Info#response{headers=H#res_headers{other=OtherHeaders}};
+ {Chunk,ChunkSize,_ExtensionList} -> % Ignore extension
+ Info1=Info#response{body= <<(Info#response.body)/binary,
+ Chunk/binary>>},
+ read_client_chunked_body(Info1,Timeout,MaxChunkSz-ChunkSize);
+ {error,Reason} ->
+ throw({error,Reason})
+ end.
+
+
+read_server_chunked_body(Info,Timeout,MaxChunkSz) ->
+ case read_chunk(Info#mod.socket_type,Info#mod.socket,
+ Timeout,0,MaxChunkSz) of
+ {last_chunk,_ExtensionList} -> % Ignore extension
+ TrailH=read_headers_old(Info#mod.socket_type,Info#mod.socket,
+ Timeout),
+ H=Info#mod.headers,
+ OtherHeaders=H#req_headers.other++TrailH,
+ Info#mod{headers=H#req_headers{other=OtherHeaders}};
+ {Chunk,ChunkSize,_ExtensionList} -> % Ignore extension
+ Info1=Info#mod{entity_body= <<(Info#mod.entity_body)/binary,
+ Chunk/binary>>},
+ read_server_chunked_body(Info1,Timeout,MaxChunkSz-ChunkSize);
+ {error,Reason} ->
+ throw({error,Reason})
+ end.
+
+
+read_chunk(Scheme,Socket,Timeout,Int,MaxChunkSz) when MaxChunkSz>Int ->
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ <<C>> when $0=<C,C=<$9 ->
+ read_chunk(Scheme,Socket,Timeout,16*Int+(C-$0),MaxChunkSz);
+ <<C>> when $a=<C,C=<$f ->
+ read_chunk(Scheme,Socket,Timeout,16*Int+10+(C-$a),MaxChunkSz);
+ <<C>> when $A=<C,C=<$F ->
+ read_chunk(Scheme,Socket,Timeout,16*Int+10+(C-$A),MaxChunkSz);
+ <<$;>> when Int>0 ->
+ ExtensionList=read_chunk_ext_name(Scheme,Socket,Timeout,[],[]),
+ read_chunk_data(Scheme,Socket,Int+1,ExtensionList,Timeout);
+ <<$;>> when Int==0 ->
+ ExtensionList=read_chunk_ext_name(Scheme,Socket,Timeout,[],[]),
+ read_data_lf(Scheme,Socket,Timeout),
+ {last_chunk,ExtensionList};
+ <<?CR>> when Int>0 ->
+ read_chunk_data(Scheme,Socket,Int+1,[],Timeout);
+ <<?CR>> when Int==0 ->
+ read_data_lf(Scheme,Socket,Timeout),
+ {last_chunk,[]};
+ <<C>> when C==$ -> % Some servers (e.g., Apache 1.3.6) throw in
+ % additional whitespace...
+ read_chunk(Scheme,Socket,Timeout,Int,MaxChunkSz);
+ _Other ->
+ {error,unexpected_chunkdata}
+ end;
+read_chunk(_Scheme,_Socket,_Timeout,_Int,_MaxChunkSz) ->
+ {error,body_too_big}.
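+
+%% Hypothetical illustration (not called anywhere): the chunk-size above is a
+%% hexadecimal count of the chunk-data octets, accumulated one character at a
+%% time just as read_chunk/5 does, so a stream such as
+%% "1a;name=val\r\n<26 bytes>\r\n" announces a 26-byte chunk.
+chunk_size_example() ->
+    26 = lists:foldl(fun(C, Acc) when $0 =< C, C =< $9 -> Acc*16 + (C - $0);
+                        (C, Acc) when $a =< C, C =< $f -> Acc*16 + 10 + (C - $a)
+                     end, 0, "1a"),
+    ok.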
+
+
+%%% Note:
+%%% - Got the initial ?CR already!
+%%% - Bitsyntax does not allow matching of ?CR,?LF at the end of the first read
+read_chunk_data(Scheme,Socket,Int,ExtensionList,Timeout) ->
+ case read_more_data(Scheme,Socket,Int,Timeout) of
+ <<?LF,Chunk/binary>> ->
+ case read_more_data(Scheme,Socket,2,Timeout) of
+ <<?CR,?LF>> ->
+ {Chunk,size(Chunk),ExtensionList};
+ _ ->
+ {error,bad_chunkdata}
+ end;
+ _ ->
+ {error,bad_chunkdata}
+ end.
+
+read_chunk_ext_name(Scheme,Socket,Timeout,Name,Acc) ->
+ Len=length(Name),
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ $= when Len>0 ->
+ read_chunk_ext_val(Scheme,Socket,Timeout,Name,[],Acc);
+ $; when Len>0 ->
+ read_chunk_ext_name(Scheme,Socket,Timeout,[],
+ [{lists:reverse(Name),""}|Acc]);
+ ?CR when Len>0 ->
+ lists:reverse([{lists:reverse(Name,"")}|Acc]);
+ Token -> % FIXME Check that it is "token"
+ read_chunk_ext_name(Scheme,Socket,Timeout,[Token|Name],Acc);
+ _ ->
+ {error,bad_chunk_extension_name}
+ end.
+
+read_chunk_ext_val(Scheme,Socket,Timeout,Name,Val,Acc) ->
+ Len=length(Val),
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ $; when Len>0 ->
+ read_chunk_ext_name(Scheme,Socket,Timeout,[],
+ [{Name,lists:reverse(Val)}|Acc]);
+ ?CR when Len>0 ->
+ lists:reverse([{Name,lists:reverse(Val)}|Acc]);
+ Token -> % FIXME Check that it is "token" or "quoted-string"
+ read_chunk_ext_val(Scheme,Socket,Timeout,Name,[Token|Val],Acc);
+ _ ->
+ {error,bad_chunk_extension_value}
+ end.
+
+read_data_lf(Scheme,Socket,Timeout) ->
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ ?LF ->
+ ok;
+ _ ->
+ {error,bad_chunkdata}
+ end.
+
+%%% ----------------------------------------------------------------------------
+%%% The body was "multipart/byteranges", decode it.
+%%% Example from RFC 2616, Appendix 19.2
+%%% HTTP/1.1 206 Partial Content
+%%% Date: Wed, 15 Nov 1995 06:25:24 GMT
+%%% Last-Modified: Wed, 15 Nov 1995 04:58:08 GMT
+%%% Content-type: multipart/byteranges; boundary=THIS_STRING_SEPARATES
+%%%
+%%% --THIS_STRING_SEPARATES
+%%% Content-type: application/pdf
+%%% Content-range: bytes 500-999/8000
+%%%
+%%% ...the first range...
+%%% --THIS_STRING_SEPARATES
+%%% Content-type: application/pdf
+%%% Content-range: bytes 7000-7999/8000
+%%%
+%%% ...the second range
+%%% --THIS_STRING_SEPARATES--
+%%%
+%%% Notes:
+%%%
+%%% 1) Additional CRLFs may precede the first boundary string in the
+%%% entity.
+%%% FIXME!!
+read_client_multipartrange_body(Info,Parstr,Timeout) ->
+ Boundary=get_boundary(Parstr),
+ scan_boundary(Info,Boundary),
+ Info#response{body=read_multipart_body(Info,Boundary,Timeout)}.
+
+read_multipart_body(Info,Boundary,Timeout) ->
+ Info.
+
+% Headers=read_headers_old(Info#response.scheme,Info#response.socket,Timeout),
+% H=Info#response.headers,
+% OtherHeaders=H#res_headers.other++TrailH,
+% Info#response{headers=H#res_headers{other=OtherHeaders}}.
+
+
+scan_boundary(Info,Boundary) ->
+ Info.
+
+
+get_boundary(Parstr) ->
+ case skip_lwsp(Parstr) of
+ [] ->
+ throw({error,missing_range_boundary_parameter});
+ Val ->
+ get_boundary2(string:tokens(Val, ";"))
+ end.
+
+get_boundary2([]) ->
+ undefined;
+get_boundary2([Param|Rest]) ->
+ case string:tokens(skip_lwsp(Param), "=") of
+ ["boundary"++Attribute,Value] ->
+ Value;
+ _ ->
+ get_boundary2(Rest)
+ end.
+
+
+%% skip space & tab
+skip_lwsp([$ | Cs]) -> skip_lwsp(Cs);
+skip_lwsp([$\t | Cs]) -> skip_lwsp(Cs);
+skip_lwsp(Cs) -> Cs.
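+
+%% Hypothetical illustration (not called anywhere): extracting the boundary
+%% parameter from the Content-Type parameter string of the RFC 2616 example
+%% above (the argument is an assumed example value).
+boundary_example() ->
+    "THIS_STRING_SEPARATES" = get_boundary("; boundary=THIS_STRING_SEPARATES"),
+    ok.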
+
+%%% ----------------------------------------------------------------------------
+
+%%% Read the incoming data from the open socket.
+read_more_data(http,Socket,Len,Timeout) ->
+ case gen_tcp:recv(Socket,Len,Timeout) of
+ {ok,Val} ->
+ Val;
+ {error, timeout} ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+% httpd_response:send_status(Info,400,none),
+ throw({error, Reason})
+ end;
+read_more_data(https,Socket,Len,Timeout) ->
+ case ssl:recv(Socket,Len,Timeout) of
+ {ok,Val} ->
+ Val;
+ {error, etimedout} ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+% httpd_response:send_status(Info,400,none),
+ throw({error, Reason})
+ end.
+
+
+%% =============================================================================
+%%% Socket handling
+
+accept(http,ListenSocket, Timeout) ->
+ gen_tcp:accept(ListenSocket, Timeout);
+accept(https,ListenSocket, Timeout) ->
+ ssl:accept(ListenSocket, Timeout).
+
+
+close(http,Socket) ->
+ gen_tcp:close(Socket);
+close(https,Socket) ->
+ ssl:close(Socket).
+
+
+connect(#request{scheme=http,settings=Settings,address=Addr}) ->
+ case proxyusage(Addr,Settings) of
+ {error,Reason} ->
+ {error,Reason};
+ {Host,Port} ->
+ Opts=[binary,{active,false},{reuseaddr,true}],
+ gen_tcp:connect(Host,Port,Opts)
+ end;
+connect(#request{scheme=https,settings=Settings,address=Addr}) ->
+ case proxyusage(Addr,Settings) of
+ {error,Reason} ->
+ {error,Reason};
+ {Host,Port} ->
+ Opts=case Settings#client_settings.ssl of
+ false ->
+ [binary,{active,false}];
+ SSLSettings ->
+ [binary,{active,false}]++SSLSettings
+ end,
+ ssl:connect(Host,Port,Opts)
+ end.
+
+
+%%% Check to see if the given {Host,Port} tuple is in the NoProxyList
+%%% Returns a possibly updated {Host,Port} tuple (the proxy address when a proxy is used)
+proxyusage(HostPort,Settings) ->
+ case Settings#client_settings.useproxy of
+ true ->
+ case noProxy(HostPort,Settings#client_settings.noproxylist) of
+ true ->
+ HostPort;
+ _ ->
+ case Settings#client_settings.proxy of
+ undefined ->
+ {error,no_proxy_defined};
+ ProxyHostPort ->
+ ProxyHostPort
+ end
+ end;
+ _ ->
+ HostPort
+ end.
+
+noProxy(_HostPort,[]) ->
+ false;
+noProxy({Host,Port},[{Host,Port}|Rest]) ->
+ true;
+noProxy(HostPort,[_|Rest]) ->
+ noProxy(HostPort,Rest).
+
+
+controlling_process(http,Socket,Pid) ->
+ gen_tcp:controlling_process(Socket,Pid);
+controlling_process(https,Socket,Pid) ->
+ ssl:controlling_process(Socket,Pid).
+
+
+deliver(SocketType, Socket, Message) ->
+ case send(SocketType, Socket, Message) of
+ {error, einval} ->
+ close(SocketType, Socket),
+ socket_closed;
+ {error, _Reason} ->
+% ?vlog("deliver(~p) failed for reason:"
+% "~n Reason: ~p",[SocketType,_Reason]),
+ close(SocketType, Socket),
+ socket_closed;
+ _Other ->
+ ok
+ end.
+
+
+recv0(http,Socket,Timeout) ->
+ gen_tcp:recv(Socket,0,Timeout);
+recv0(https,Socket,Timeout) ->
+ ssl:recv(Socket,0,Timeout).
+
+recv(http,Socket,Len,Timeout) ->
+ gen_tcp:recv(Socket,Len,Timeout);
+recv(https,Socket,Len,Timeout) ->
+ ssl:recv(Socket,Len,Timeout).
+
+
+setopts(http,Socket,Options) ->
+ inet:setopts(Socket,Options);
+setopts(https,Socket,Options) ->
+ ssl:setopts(Socket,Options).
+
+
+send(http,Socket,Message) ->
+ gen_tcp:send(Socket,Message);
+send(https,Socket,Message) ->
+ ssl:send(Socket,Message).
+
+
+%%% ============================================================================
+%%% HTTP Server only
+
+%%% Returns the authentication data in the HTTP request
+get_auth_data("Basic "++EncodedString) ->
+ UnCodedString=httpd_util:decode_base64(EncodedString),
+ case catch string:tokens(UnCodedString,":") of
+ [User,PassWord] ->
+ {User,PassWord};
+ {error,Error}->
+ {error,Error}
+ end;
+get_auth_data(BadCredentials) when list(BadCredentials) ->
+ {error,BadCredentials};
+get_auth_data(_) ->
+ {error,nouser}.
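+
+%% Hypothetical illustration (not called anywhere): a "Basic" credential is
+%% the base64 encoding of "User:Password" (RFC 2617's example pair is used
+%% here); get_auth_data/1 above reverses this with httpd_util:decode_base64/1
+%% and a split on ":".
+basic_credential_example() ->
+    "Basic " ++ base64:encode_to_string("Aladdin:open sesame").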
+
+
+create_header_list(H) ->
+ lookup(connection,H#req_headers.connection)++
+ lookup(host,H#req_headers.host)++
+ lookup(content_length,H#req_headers.content_length)++
+ lookup(transfer_encoding,H#req_headers.transfer_encoding)++
+ lookup(authorization,H#req_headers.authorization)++
+ lookup(user_agent,H#req_headers.user_agent)++
+    lookup(range,H#req_headers.range)++
+    lookup(if_range,H#req_headers.if_range)++
+    lookup(if_match,H#req_headers.if_match)++
+    lookup(if_none_match,H#req_headers.if_none_match)++
+    lookup(if_modified_since,H#req_headers.if_modified_since)++
+    lookup(if_unmodified_since,H#req_headers.if_unmodified_since)++
+ H#req_headers.other.
+
+lookup(_Key,undefined) ->
+ [];
+lookup(Key,Val) ->
+ [{Key,Val}].
+
+
+
+%%% ============================================================================
+%%% This code is for parsing trailer headers in chunked messages.
+%%% Will be deprecated whenever I have found an alternative working solution!
+%%% Note:
+%%% - The header names are returned slightly differently from what
+%%%   inet_drv returns
+read_headers_old(Scheme,Socket,Timeout) ->
+ read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]).
+
+read_headers_old(<<>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ read_headers_old(read_more_data(Scheme,Socket,1,Timeout),
+ Scheme,Socket,Timeout,Acc,AccHdrs);
+read_headers_old(<<$\r>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ read_headers_old(<<$\r,(read_more_data(Scheme,Socket,1,Timeout))/binary>>,
+ Scheme,Socket,Timeout,Acc,AccHdrs);
+read_headers_old(<<$\r,$\n>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ if
+ Acc==[] -> % Done!
+ tagup_header(lists:reverse(AccHdrs));
+ true ->
+ read_headers_old(read_more_data(Scheme,Socket,1,Timeout),
+ Scheme,Socket,
+ Timeout,[],[lists:reverse(Acc)|AccHdrs])
+ end;
+read_headers_old(<<C>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ read_headers_old(read_more_data(Scheme,Socket,1,Timeout),
+ Scheme,Socket,Timeout,[C|Acc],AccHdrs);
+read_headers_old(Bin,_Scheme,_Socket,_Timeout,_Acc,_AccHdrs) ->
+ io:format("ERROR: Unexpected data from inet driver: ~p",[Bin]),
+ throw({error,this_is_a_bug}).
+
+
+%% Parses the headers of an HTTP request and returns a key,value tuple
+%% list containing the Name and Value of each header directive, as in:
+%%
+%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
+%%
+%% But in HTTP/1.1 the field names are case insensitive, so it must now be
+%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
+%% The standard furthermore says that leading and trailing white space
+%% is not part of the field value and shall therefore be removed.
+tagup_header([]) -> [];
+tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
+
+tag([], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), ""};
+tag([$:|Rest], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), httpd_util:strip(Rest)};
+tag([Chr|Rest], Tag) ->
+ tag(Rest, [Chr|Tag]).
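+
+%%% Illustration (added for clarity; not part of the original module):
+%%%
+%%%   tagup_header(["Content-Type: multipart/mixed ","Host: localhost"])
+%%%       -> [{"content-type","multipart/mixed"},{"host","localhost"}]
+%%%
+%%% i.e. field names are lowercased and surrounding white space is stripped
+%%% from the field values, as described above.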
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_handler.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_handler.erl
new file mode 100644
index 0000000000..5076a12aaa
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_handler.erl
@@ -0,0 +1,724 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+%%% TODO:
+%%% - If an error is returned when sending a request, don't use this
+%%% session anymore.
+%%% - Closing of sessions not properly implemented for some cases
+
+%%% File : httpc_handler.erl
+%%% Author : Johan Blom <[email protected]>
+%%% Description : Handles HTTP client responses, for a single TCP session
+%%% Created : 4 Mar 2002 by Johan Blom
+
+-module(httpc_handler).
+
+-include("http.hrl").
+-include("jnets_httpd.hrl").
+
+-export([init_connection/2,http_request/2]).
+
+%%% ==========================================================================
+%%% "Main" function in the spawned process for the session.
+init_connection(Req,Session) when record(Req,request) ->
+ case catch http_lib:connect(Req) of
+ {ok,Socket} ->
+ case catch http_request(Req,Socket) of
+ ok ->
+ case Session#session.clientclose of
+ true ->
+ ok;
+ false ->
+ httpc_manager:register_socket(Req#request.address,
+ Session#session.id,
+ Socket)
+ end,
+ next_response_with_request(Req,
+ Session#session{socket=Socket});
+ {error,Reason} -> % Not possible to use new session
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session_ok(Req#request.address,
+ Session#session{socket=Socket})
+ end;
+ {error,Reason} -> % Not possible to set up new session
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session_ok2(Req#request.address,
+ Session#session.clientclose,Session#session.id)
+ end.
+
+next_response_with_request(Req,Session) ->
+ Timeout=(Req#request.settings)#client_settings.timeout,
+ case catch read(Timeout,Session#session.scheme,Session#session.socket) of
+ {Status,Headers,Body} ->
+ NewReq=handle_response({Status,Headers,Body},Timeout,Req,Session),
+ next_response_with_request(NewReq,Session);
+ {error,Reason} ->
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session(Req#request.address,Session,aborted_request);
+ {'EXIT',Reason} ->
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session(Req#request.address,Session,aborted_request)
+ end.
+
+handle_response(Response,Timeout,Req,Session) ->
+ case http_response(Response,Req,Session) of
+ ok ->
+ next_response(Timeout,Req#request.address,Session);
+ stop ->
+ exit(normal);
+ {error,Reason} ->
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session(Req#request.address,Session,aborted_request)
+ end.
+
+
+
+%%% Wait for the next response until
+%%% - the session is closed by the other side
+%%% => set up a new session, if there are pending requests in the queue
+%%% - a "Connection: close" header is received
+%%% => close the connection (release the socket), then
+%%% set up a new session, if there are pending requests in the queue
+%%%
+%%% Note:
+%%% - When invoked there are no pending responses on received requests.
+%%% - Never close the session explicitly, let it timeout instead!
+next_response(Timeout,Address,Session) ->
+ case httpc_manager:next_request(Address,Session#session.id) of
+ no_more_requests ->
+ %% There are no more pending responses, now just wait for
+ %% timeout or a new response.
+ case catch read(Timeout,
+ Session#session.scheme,Session#session.socket) of
+ {error,Reason} when Reason==session_remotely_closed;
+ Reason==session_local_timeout ->
+ exit_session_ok(Address,Session);
+ {error,Reason} ->
+ exit_session(Address,Session,aborted_request);
+ {'EXIT',Reason} ->
+ exit_session(Address,Session,aborted_request);
+ {Status2,Headers2,Body2} ->
+ case httpc_manager:next_request(Address,
+ Session#session.id) of
+ no_more_requests -> % Should not happen!
+ exit_session(Address,Session,aborted_request);
+ {error,Reason} -> % Should not happen!
+ exit_session(Address,Session,aborted_request);
+ NewReq ->
+ handle_response({Status2,Headers2,Body2},
+ Timeout,NewReq,Session)
+ end
+ end;
+ {error,Reason} -> % The connection has been closed by httpc_manager
+ exit_session(Address,Session,aborted_request);
+ NewReq ->
+ NewReq
+ end.
+
+%% ===========================================================================
+%% Internals
+
+%%% Read in and parse response data from the socket
+read(Timeout,SockType,Socket) ->
+ Info=#response{scheme=SockType,socket=Socket},
+ http_lib:setopts(SockType,Socket,[{packet, http}]),
+ Info1=read_response(SockType,Socket,Info,Timeout),
+ http_lib:setopts(SockType,Socket,[binary,{packet, raw}]),
+ case (Info1#response.headers)#res_headers.content_type of
+ "multipart/byteranges"++Param ->
+ range_response_body(Info1,Timeout,Param);
+ _ ->
+ #response{status=Status2,headers=Headers2,body=Body2}=
+ http_lib:read_client_body(Info1,Timeout),
+ {Status2,Headers2,Body2}
+ end.
+
+
+%%% From RFC 2616:
+%%% Status-Line = HTTP-Version SP Status-Code SP Reason-Phrase CRLF
+%%% HTTP-Version = "HTTP" "/" 1*DIGIT "." 1*DIGIT
+%%% Status-Code = 3DIGIT
+%%% Reason-Phrase = *<TEXT, excluding CR, LF>
+read_response(SockType,Socket,Info,Timeout) ->
+ case http_lib:recv0(SockType,Socket,Timeout) of
+ {ok,{http_response,{1,VerMin}, Status, _Phrase}} when VerMin==0;
+ VerMin==1 ->
+ Info1=Info#response{status=Status,http_version=VerMin},
+ http_lib:read_client_headers(Info1,Timeout);
+ {ok,{http_response,_Version, _Status, _Phrase}} ->
+ throw({error,bad_status_line});
+ {error, timeout} ->
+ throw({error,session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error,session_remotely_closed});
+ {error, Reason} ->
+ throw({error,Reason})
+ end.
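+
+%%% Illustration (added for clarity; not part of the original module): with the
+%%% socket in {packet,http} mode, a status line such as "HTTP/1.1 200 OK" is
+%%% delivered by http_lib:recv0/3 as
+%%%
+%%%   {ok,{http_response,{1,1},200,"OK"}}
+%%%
+%%% so read_response/4 records status=200 and http_version=1 before reading
+%%% the headers.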
+
+%%% From RFC 2616, Section 4.4, Page 34
+%% 4.If the message uses the media type "multipart/byteranges", and the
+%% transfer-length is not otherwise specified, then this self-
+%% delimiting media type defines the transfer-length. This media type
+%% MUST NOT be used unless the sender knows that the recipient can parse
+%% it; the presence in a request of a Range header with multiple byte-
+%% range specifiers from a 1.1 client implies that the client can parse
+%% multipart/byteranges responses.
+%%% FIXME !!
+range_response_body(Info,Timeout,Param) ->
+ Headers=Info#response.headers,
+ case {Headers#res_headers.content_length,
+ Headers#res_headers.transfer_encoding} of
+ {undefined,undefined} ->
+ #response{status=Status2,headers=Headers2,body=Body2}=
+ http_lib:read_client_multipartrange_body(Info,Param,Timeout),
+ {Status2,Headers2,Body2};
+ _ ->
+ #response{status=Status2,headers=Headers2,body=Body2}=
+ http_lib:read_client_body(Info,Timeout),
+ {Status2,Headers2,Body2}
+ end.
+
+
+%%% ----------------------------------------------------------------------------
+%%% Host: field is required when addressing multi-homed sites ...
+%%% It must not be present when the request is being made to a proxy.
+http_request(#request{method=Method,id=Id,
+ scheme=Scheme,address={Host,Port},pathquery=PathQuery,
+ headers=Headers, content={ContentType,Body},
+ settings=Settings},
+ Socket) ->
+ PostData=
+ if
+ Method==post;Method==put ->
+ case Headers#req_headers.expect of
+ "100-continue" ->
+ content_type_header(ContentType) ++
+ content_length_header(length(Body)) ++
+ "\r\n";
+ _ ->
+ content_type_header(ContentType) ++
+ content_length_header(length(Body)) ++
+ "\r\n" ++ Body
+ end;
+ true ->
+ "\r\n"
+ end,
+ Message=
+ case useProxy(Settings#client_settings.useproxy,
+ {Scheme,Host,Port,PathQuery}) of
+ false ->
+ method(Method)++" "++PathQuery++" HTTP/1.1\r\n"++
+ host_header(Host)++te_header()++
+ headers(Headers) ++ PostData;
+ AbsURI ->
+ method(Method)++" "++AbsURI++" HTTP/1.1\r\n"++
+ te_header()++
+ headers(Headers)++PostData
+ end,
+ http_lib:send(Scheme,Socket,Message).
+
+useProxy(false,_) ->
+ false;
+useProxy(true,{Scheme,Host,Port,PathQuery}) ->
+ [atom_to_list(Scheme),"://",Host,":",integer_to_list(Port),PathQuery].
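+
+%%% Illustration (added for clarity; host and port are made-up examples):
+%%% without a proxy the request line uses the path only, plus a Host header,
+%%% whereas via a proxy the absolute URI built by useProxy/2 is used:
+%%%
+%%%   "GET /index.html HTTP/1.1\r\nHost: www.example.org\r\n..."
+%%%   "GET http://www.example.org:8080/index.html HTTP/1.1\r\n..."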
+
+
+
+headers(#req_headers{expect=Expect,
+ other=Other}) ->
+ H1=case Expect of
+ undefined ->[];
+ _ -> "Expect: "++Expect++"\r\n"
+ end,
+ H1++headers_other(Other).
+
+
+headers_other([]) ->
+ [];
+headers_other([{Key,Value}|Rest]) when atom(Key) ->
+ Head = atom_to_list(Key)++": "++Value++"\r\n",
+ Head ++ headers_other(Rest);
+headers_other([{Key,Value}|Rest]) ->
+ Head = Key++": "++Value++"\r\n",
+ Head ++ headers_other(Rest).
+
+host_header(Host) ->
+ "Host: "++lists:concat([Host])++"\r\n".
+content_type_header(ContentType) ->
+ "Content-Type: " ++ ContentType ++ "\r\n".
+content_length_header(ContentLength) ->
+ "Content-Length: "++integer_to_list(ContentLength) ++ "\r\n".
+te_header() ->
+ "TE: \r\n".
+
+method(Method) ->
+ httpd_util:to_upper(atom_to_list(Method)).
+
+
+%%% ----------------------------------------------------------------------------
+http_response({Status,Headers,Body},Req,Session) ->
+ case Status of
+ 100 ->
+ status_continue(Req,Session);
+ 200 ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {Status,Headers,Body}}),
+ ServerClose=http_lib:connection_close(Headers),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ 300 -> status_multiple_choices(Headers,Body,Req,Session);
+ 301 -> status_moved_permanently(Req#request.method,
+ Headers,Body,Req,Session);
+ 302 -> status_found(Headers,Body,Req,Session);
+ 303 -> status_see_other(Headers,Body,Req,Session);
+ 304 -> status_not_modified(Headers,Body,Req,Session);
+ 305 -> status_use_proxy(Headers,Body,Req,Session);
+ %% 306 This Status code is not used in HTTP 1.1
+ 307 -> status_temporary_redirect(Headers,Body,Req,Session);
+ 503 -> status_service_unavailable({Status,Headers,Body},Req,Session);
+ Status50x when Status50x==500;Status50x==501;Status50x==502;
+ Status50x==504;Status50x==505 ->
+ status_server_error_50x({Status,Headers,Body},Req,Session);
+ _ -> % FIXME May want to take some action on other Status codes as well
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {Status,Headers,Body}}),
+ ServerClose=http_lib:connection_close(Headers),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session)
+ end.
+
+
+%%% Status code dependent functions.
+
+%%% Received a 100 Status code ("Continue")
+%%% From RFC2616
+%%% The client SHOULD continue with its request. This interim response is
+%%% used to inform the client that the initial part of the request has
+%%% been received and has not yet been rejected by the server. The client
+%%% SHOULD continue by sending the remainder of the request or, if the
+%%% request has already been completed, ignore this response. The server
+%%% MUST send a final response after the request has been completed. See
+%%% section 8.2.3 for detailed discussion of the use and handling of this
+%%% status code.
+status_continue(Req,Session) ->
+ {_,Body}=Req#request.content,
+ http_lib:send(Session#session.scheme,Session#session.socket,Body),
+ next_response_with_request(Req,Session).
+
+
+%%% Received a 300 Status code ("Multiple Choices")
+%%% The resource is located in any one of a set of locations
+%%% - If a 'Location' header is present (preserved server choice), use that
+%%% to automatically redirect to the given URL
+%%% - else if the Content-Type/Body both are non-empty let the user agent make
+%%% the choice and thus return a response with status 300
+%%% Note:
+%%% - If the response is to a HEAD request, the Content-Type/Body should both be empty.
+%%% - The behaviour on an empty Content-Type or Body is unspecified.
+%%% However, e.g. "Apache/1.3" servers return both empty if the header
+%%% 'if-modified-since: Date' was sent in the request and the content is
+%%% "not modified" (instead of 304). Thus implicitly giving the cache as the
+%%% only choice.
+status_multiple_choices(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {300,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_multiple_choices(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {300,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 301 Status code ("Moved Permanently")
+%%% The resource has been assigned a new permanent URI
+%%% - If a 'Location' header is present, use that to automatically redirect to
+%%% the given URL if GET or HEAD request
+%%% - else return
+%%% Note:
+%%% - The Body should contain a short hypertext note with a hyperlink to the
+%%% new URI. Return this if the Content-Type is acceptable (some HTTP servers
+%%% don't deal properly with Accept headers)
+status_moved_permanently(Method,Headers,Body,Req,Session)
+ when (((Req#request.settings)#client_settings.autoredirect)==true) and
+ ((Method==get) or (Method==head)) ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {301,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_moved_permanently(_Method,Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {301,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 302 Status code ("Found")
+%%% The requested resource resides temporarily under a different URI.
+%%% Note:
+%%% - Only cacheable if indicated by a Cache-Control or Expires header
+status_found(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {302,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_found(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {302,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+%%% Received a 303 Status code ("See Other")
+%%% The requested resource can be found under a different URI and should be retrieved using GET
+%%% Note:
+%%% - Must not be cached
+status_see_other(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {303,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ method=get,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_see_other(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {303,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 304 Status code ("Not Modified")
+%%% Note:
+%%% - The response MUST NOT contain a body.
+%%% - The response MUST include the following header fields:
+%%% - Date, unless its omission is required
+%%% - ETag and/or Content-Location, if the header would have been sent
+%%% in a 200 response to the same request
+%%% - Expires, Cache-Control, and/or Vary, if the field-value might
+%%% differ from that sent in any previous response for the same
+%%% variant
+status_not_modified(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {304,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_not_modified(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {304,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+
+%%% Received a 305 Status code ("Use Proxy")
+%%% The requested resource MUST be accessed through the proxy given by the
+%%% Location field
+status_use_proxy(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {305,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_use_proxy(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {305,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 307 Status code ("Temporary Redirect")
+status_temporary_redirect(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {307,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_temporary_redirect(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {307,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+
+%%% Received a 503 Status code ("Service Unavailable")
+%%% The server is currently unable to handle the request due to a
+%%% temporary overloading or maintenance of the server. The implication
+%%% is that this is a temporary condition which will be alleviated after
+%%% some delay. If known, the length of the delay MAY be indicated in a
+%%% Retry-After header. If no Retry-After is given, the client SHOULD
+%%% handle the response as it would for a 500 response.
+%% Note:
+%% - This session is now considered busy, thus cancel any requests in the
+%% pipeline and close the session.
+%% FIXME! Implement a user option to automatically retry if the 'Retry-After'
+%% header is given.
+status_service_unavailable(Resp,Req,Session) ->
+% RetryAfter=Headers#res_headers.retry_after,
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,Resp}),
+ close_session(server_connection_close,Req,Session).
+
+
+%%% Received a 50x Status code (~ "Service Error")
+%%% Response status codes beginning with the digit "5" indicate cases in
+%%% which the server is aware that it has erred or is incapable of
+%%% performing the request.
+status_server_error_50x(Resp,Req,Session) ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,Resp}),
+ close_session(server_connection_close,Req,Session).
+
+
+%%% Handles requests for redirects
+%%% The redirected request might be:
+%%% - FIXME! on another TCP session, another scheme
+%%% - on the same TCP session, same scheme
+%%% - on another TCP session , same scheme
+%%% However, in all cases treat it as a new request, with redircount updated.
+%%%
+%%% The redirect may fail, but this is not a reason to close this session.
+%%% Instead, return an error for this request and continue as ok.
+handle_redirect(ClientClose,ServerClose,Req,Session) ->
+ case httpc_manager:request(Req) of
+ {ok,_ReqId} -> % FIXME Should I perhaps reuse the Reqid?
+ handle_connection(ClientClose,ServerClose,Req,Session);
+ {error,Reason} ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {error,Reason}}),
+ handle_connection(ClientClose,ServerClose,Req,Session)
+ end.
+
+%%% Check if the persistent connection flag is false (i.e. the client requested
+%%% a non-persistent connection), or if the server requires the connection to be
+%%% closed (by sending a "Connection: close" header). If a non-persistent
+%%% connection is required, we may close the connection immediately.
+handle_connection(ClientClose,ServerClose,Req,Session) ->
+ case {ClientClose,ServerClose} of
+ {false,false} ->
+ ok;
+ {false,true} -> % The server requests this session to be closed.
+ close_session(server_connection_close,Req,Session);
+ {true,_} -> % The client requested a non-persistent connection
+ close_session(client_connection_close,Req,Session)
+ end.
+
+
+%%% Close the session.
+%%% We now have three cases:
+%%% - The client requested a non-persistent connection when initiating the request.
+%%% Session info is not stored in httpc_manager.
+%%% - The server requests a non-persistent connection when answering a request.
+%%% No need to resend the request, but there might be a pipeline.
+%%% - Some kind of error occurred.
+%%% Close the session; we may then try resending all requests in the pipeline,
+%%% including the current one, depending on the error.
+%%% FIXME! Should not always abort the session (see close_session in
+%%% httpc_manager for more details)
+close_session(client_connection_close,_Req,Session) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ stop;
+close_session(server_connection_close,Req,Session) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ httpc_manager:abort_session(Req#request.address,Session#session.id,
+ aborted_request),
+ stop.
+
+exit_session(Address,Session,Reason) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ httpc_manager:abort_session(Address,Session#session.id,Reason),
+ exit(normal).
+
+%%% This is the "normal" case to close a persistent connection. I.e., there are
+%%% no more requests waiting and the session was closed by the client or the
+%%% server because of a timeout or a user request.
+exit_session_ok(Address,Session) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ exit_session_ok2(Address,Session#session.clientclose,Session#session.id).
+
+exit_session_ok2(Address,ClientClose,Sid) ->
+ case ClientClose of
+ false ->
+ httpc_manager:close_session(Address,Sid);
+ true ->
+ ok
+ end,
+ exit(normal).
+
+%%% ============================================================================
+%%% This is deprecated code, to be removed
+
+format_time() ->
+ {_,_,MicroSecs}=TS=now(),
+ {{Y,Mon,D},{H,M,S}}=calendar:now_to_universal_time(TS),
+ lists:flatten(io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w,~2.2.0w:~2.2.0w:~6.3.0f",
+ [Y,Mon,D,H,M,S+(MicroSecs/1000000)])).
+
+%%% Read more data from the open socket.
+%%% Two different read functions are used because the {active, once} socket
+%%% option is (currently) not available for SSL...
+%%% FIXME
+% read_more_data(http,Socket,Timeout) ->
+% io:format("read_more_data(ip_comm) -> "
+% "~n set active = 'once' and "
+% "await a chunk data", []),
+% http_lib:setopts(Socket, [{active,once}]),
+% read_more_data_ipcomm(Socket,Timeout);
+% read_more_data(https,Socket,Timeout) ->
+% case ssl:recv(Socket,0,Timeout) of
+% {ok,MoreData} ->
+% MoreData;
+% {error,closed} ->
+% throw({error, session_remotely_closed});
+% {error,etimedout} ->
+% throw({error, session_local_timeout});
+% {error,Reason} ->
+% throw({error, Reason});
+% Other ->
+% throw({error, Other})
+% end.
+
+% %%% Send any incoming requests on the open session immediately
+% read_more_data_ipcomm(Socket,Timeout) ->
+% receive
+% {tcp,Socket,MoreData} ->
+% % ?vtrace("read_more_data(ip_comm) -> got some data:~p",
+% % [MoreData]),
+% MoreData;
+% {tcp_closed,Socket} ->
+% % ?vtrace("read_more_data(ip_comm) -> socket closed",[]),
+% throw({error,session_remotely_closed});
+% {tcp_error,Socket,Reason} ->
+% % ?vtrace("read_more_data(ip_comm) -> ~p socket error: ~p",
+% % [self(),Reason]),
+% throw({error, Reason});
+% stop ->
+% throw({error, user_req})
+% after Timeout ->
+% throw({error, session_local_timeout})
+% end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_manager.erl
new file mode 100644
index 0000000000..4659749270
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpc_manager.erl
@@ -0,0 +1,542 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+%% Created : 18 Dec 2001 by Johan Blom <[email protected]>
+%%
+
+-module(httpc_manager).
+
+-behaviour(gen_server).
+
+-include("http.hrl").
+
+-define(HMACALL, ?MODULE).
+-define(HMANAME, ?MODULE).
+
+%%--------------------------------------------------------------------
+%% External exports
+-export([start_link/0,start/0,
+ request/1,cancel_request/1,
+ next_request/2,
+ register_socket/3,
+ abort_session/3,close_session/2,close_session/3
+ ]).
+
+%% Debugging only
+-export([status/0]).
+
+%% gen_server callbacks
+-export([init/1,handle_call/3,handle_cast/2,handle_info/2,terminate/2,
+ code_change/3]).
+
+%%% address_db - ets() Contains mappings from a tuple {Host,Port} to a tuple
+%%% {LastSID,OpenSessions,ets()} where
+%%% LastSid is the last allocated session id,
+%%% OpenSessions is the number of currently open sessions and
+%%% ets() contains mappings from Session Id to #session{}.
+%%%
+%%% Note:
+%%% - Only persistent connections are stored in address_db
+%%% - When automatically redirecting, multiple requests are performed.
+-record(state,{
+ address_db, % ets()
+ reqid % int() Next Request id to use (identifies request).
+ }).
+
+%%====================================================================
+%% External functions
+%%====================================================================
+%%--------------------------------------------------------------------
+%% Function: start_link/0
+%% Description: Starts the server
+%%--------------------------------------------------------------------
+start() ->
+ ensure_started().
+
+start_link() ->
+ gen_server:start_link({local,?HMACALL}, ?HMANAME, [], []).
+
+
+%% Find an available session process and store it in address_db. If none is
+%% available, start a new handler process.
+request(Req) ->
+ ensure_started(),
+ ClientClose=http_lib:connection_close(Req#request.headers),
+ gen_server:call(?HMACALL,{request,ClientClose,Req},infinity).
+
+cancel_request(ReqId) ->
+ gen_server:call(?HMACALL,{cancel_request,ReqId},infinity).
+
+
+%%% Close Session
+close_session(Addr,Sid) ->
+ gen_server:call(?HMACALL,{close_session,Addr,Sid},infinity).
+close_session(Req,Addr,Sid) ->
+ gen_server:call(?HMACALL,{close_session,Req,Addr,Sid},infinity).
+
+abort_session(Addr,Sid,Msg) ->
+ gen_server:call(?HMACALL,{abort_session,Addr,Sid,Msg},infinity).
+
+
+%%% Pick the next request in the request queue
+next_request(Addr,Sid) ->
+ gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity).
+
+%%% The session handler has succeeded in setting up a new session, now register
+%%% the socket
+register_socket(Addr,Sid,Socket) ->
+ gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}).
+
+
+%%% Debugging
+status() ->
+ gen_server:cast(?HMACALL,status).
+
+
+%%--------------------------------------------------------------------
+%% Function: init/1
+%% Description: Initiates the server
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% ignore |
+%% {stop, Reason}
+%%--------------------------------------------------------------------
+init([]) ->
+ process_flag(trap_exit, true),
+ {ok,#state{address_db=ets:new(address_db,[private]),
+ reqid=0}}.
+
+
+%%--------------------------------------------------------------------
+%% Function: handle_call/3
+%% Description: Handling call messages
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%% {stop, Reason, State} (terminate/2 is called)
+%%--------------------------------------------------------------------
+%%% Note:
+%%% - We may have multiple non-persistent connections; each is handled in a
+%%% separate process, thus don't add such connections to address_db
+handle_call({request,false,Req},_From,State) ->
+ case ets:lookup(State#state.address_db,Req#request.address) of
+ [] ->
+ STab=ets:new(session_db,[private,{keypos,2},set]),
+ case persistent_new_session_request(0,Req,STab,State) of
+ {Reply,LastSid,State2} ->
+ ets:insert(State2#state.address_db,
+ {Req#request.address,{LastSid,1,STab}}),
+ {reply,Reply,State2};
+ {ErrorReply,State2} ->
+ {reply,ErrorReply,State2}
+ end;
+ [{_,{LastSid,OpenS,STab}}] ->
+ case lookup_session_entry(STab) of
+ {ok,Session} ->
+ old_session_request(Session,Req,STab,State);
+ need_new_session when OpenS<(Req#request.settings)#client_settings.max_sessions ->
+ case persistent_new_session_request(LastSid,Req,
+ STab,State) of
+ {Reply,LastSid2,State2} ->
+ ets:insert(State2#state.address_db,
+ {Req#request.address,
+ {LastSid2,OpenS+1,STab}}),
+ {reply,Reply,State2};
+ {ErrorReply,State2} ->
+ {reply,ErrorReply,State2}
+ end;
+ need_new_session ->
+ {reply,{error,too_many_sessions},State}
+ end
+ end;
+handle_call({request,true,Req},_From,State) ->
+ {Reply,State2}=not_persistent_new_session_request(Req,State),
+ {reply,Reply,State2};
+handle_call({cancel_request,_ReqId},_From,State) ->
+%% FIXME Should be possible to scan through all requests made, but perhaps
+%% better to give some more hints (such as Addr etc)
+ Reply=ok,
+ {reply,Reply,State};
+handle_call({next_request,Addr,Sid},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{_,_,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [] ->
+ {reply,{error,session_not_registered},State};
+ [S=#session{pipeline=[],quelength=QueLen}] ->
+ if
+ QueLen==1 ->
+ ets:insert(STab,S#session{quelength=0});
+ true ->
+ ok
+ end,
+ {reply,no_more_requests,State};
+ [S=#session{pipeline=Que}] ->
+ [Req|RevQue]=lists:reverse(Que),
+ ets:insert(STab,S#session{pipeline=lists:reverse(RevQue),
+ quelength=S#session.quelength-1}),
+ {reply,Req,State}
+ end
+ end;
+handle_call({close_session,Addr,Sid},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{LastSid,OpenS,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [#session{pipeline=Que}] ->
+ R=handle_close_session(lists:reverse(Que),STab,Sid,State),
+ ets:insert(State#state.address_db,
+ {Addr,{LastSid,OpenS-1,STab}}),
+ {reply,R,State};
+ [] ->
+ {reply,{error,session_not_registered},State}
+ end
+ end;
+handle_call({close_session,Req,Addr,Sid},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{LastSid,OpenS,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [#session{pipeline=Que}] ->
+ R=handle_close_session([Req|lists:reverse(Que)],
+ STab,Sid,State),
+ ets:insert(State#state.address_db,
+ {Addr,{LastSid,OpenS-1,STab}}),
+ {reply,R,State};
+ [] ->
+ {reply,{error,session_not_registered},State}
+ end
+ end;
+handle_call({abort_session,Addr,Sid,Msg},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{LastSid,OpenS,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [#session{pipeline=Que}] ->
+ R=abort_request_que(Que,{error,Msg}),
+ ets:delete(STab,Sid),
+ ets:insert(State#state.address_db,
+ {Addr,{LastSid,OpenS-1,STab}}),
+ {reply,R,State};
+ [] ->
+ {reply,{error,session_not_registered},State}
+ end
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Function: handle_cast/2
+%% Description: Handling cast messages
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%--------------------------------------------------------------------
+handle_cast(status, State) ->
+ io:format("Status:~n"),
+ print_all(lists:sort(ets:tab2list(State#state.address_db))),
+ {noreply, State};
+handle_cast({register_socket,Addr,Sid,Socket},State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {noreply,State};
+ [{_,{_,_,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [Session] ->
+ ets:insert(STab,Session#session{socket=Socket}),
+ {noreply,State};
+ [] ->
+ {noreply,State}
+ end
+ end.
+
+print_all([]) ->
+ ok;
+print_all([{Addr,{LastSid,OpenSessions,STab}}|Rest]) ->
+ io:format(" Address:~p LastSid=~p OpenSessions=~p~n",[Addr,LastSid,OpenSessions]),
+ SortedList=lists:sort(fun(A,B) ->
+ if
+ A#session.id<B#session.id ->
+ true;
+ true ->
+ false
+ end
+ end,ets:tab2list(STab)),
+ print_all2(SortedList),
+ print_all(Rest).
+
+print_all2([]) ->
+ ok;
+print_all2([Session|Rest]) ->
+ io:format(" Session:~p~n",[Session#session.id]),
+ io:format(" Client close:~p~n",[Session#session.clientclose]),
+ io:format(" Socket:~p~n",[Session#session.socket]),
+ io:format(" Pipe: length=~p Que=~p~n",[Session#session.quelength,Session#session.pipeline]),
+ print_all2(Rest).
+
+%%--------------------------------------------------------------------
+%% Function: handle_info/2
+%% Description: Handling all non call/cast messages
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%--------------------------------------------------------------------
+handle_info({'EXIT',_Pid,normal}, State) ->
+ {noreply, State};
+handle_info(Info, State) ->
+ io:format("ERROR httpc_manager:handle_info ~p~n",[Info]),
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: terminate/2
+%% Description: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%--------------------------------------------------------------------
+terminate(_Reason, State) ->
+ ets:delete(State#state.address_db).
+
+%%--------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Convert process state when code is changed
+%% Returns: {ok, NewState}
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+%%% From RFC 2616, Section 8.1.4
+%%% A client, server, or proxy MAY close the transport connection at any
+%%% time. For example, a client might have started to send a new request
+%%% at the same time that the server has decided to close the "idle"
+%%% connection. From the server's point of view, the connection is being
+%%% closed while it was idle, but from the client's point of view, a
+%%% request is in progress.
+%%%
+%%% This means that clients, servers, and proxies MUST be able to recover
+%%% from asynchronous close events. Client software SHOULD reopen the
+%%% transport connection and retransmit the aborted sequence of requests
+%%% without user interaction so long as the request sequence is
+%%% idempotent (see section 9.1.2). Non-idempotent methods or sequences
+%%%
+%%% FIXME
+%%% Note:
+%%% - If this happens (the server closes because the connection is idle) there
+%%% can't be any requests in the queue.
+%%% - This is the main function for closing sessions
+handle_close_session([],STab,Sid,_State) ->
+ ets:delete(STab,Sid);
+handle_close_session(Que,STab,Sid,_State) ->
+ ets:delete(STab,Sid),
+ abort_request_que(Que,{error,aborted_request}).
+
+
+%%% From RFC 2616, Section 8.1.2.2
+%%% Clients which assume persistent connections and pipeline immediately
+%%% after connection establishment SHOULD be prepared to retry their
+%%% connection if the first pipelined attempt fails. If a client does
+%%% such a retry, it MUST NOT pipeline before it knows the connection is
+%%% persistent. Clients MUST also be prepared to resend their requests if
+%%% the server closes the connection before sending all of the
+%%% corresponding responses.
+%%% FIXME! I'm currently not checking if this is the first attempt on the session
+%%% FIXME! Pipeline size must be dynamically variable (e.g. 0 if resend, 2 else)
+%%% The queue contains requests that were previously sent ok, but the session
+%%% was closed prematurely when reading the response.
+%%% Try to set up a new session and resend these requests.
+%%% Note:
+%%% - This MUST be a persistent session
+% handle_closed_pipelined_session_que([],_State) ->
+% ok;
+% handle_closed_pipelined_session_que(_Que,_State) ->
+% ok.
+
+
+%%% From RFC 2616, Section 8.2.4
+%%% If an HTTP/1.1 client sends a request which includes a request body,
+%%% but which does not include an Expect request-header field with the
+%%% "100-continue" expectation, and if the client is not directly
+%%% connected to an HTTP/1.1 origin server, and if the client sees the
+%%% connection close before receiving any status from the server, the
+%%% client SHOULD retry the request. If the client does retry this
+%%% request, it MAY use the following "binary exponential backoff"
+%%% algorithm to be assured of obtaining a reliable response:
+%%% ...
+%%% FIXME! I'm currently not checking if a "Expect: 100-continue" has been sent.
+% handle_remotely_closed_session_que([],_State) ->
+% ok;
+% handle_remotely_closed_session_que(_Que,_State) ->
+% % resend_que(Que,Socket),
+% ok.
+
+%%% Resend all requests in the request que
+% resend_que([],_) ->
+% ok;
+% resend_que([Req|Que],Socket) ->
+% case catch httpc_handler:http_request(Req,Socket) of
+% ok ->
+% resend_que(Que,Socket);
+% {error,Reason} ->
+% {error,Reason}
+% end.
+
+
+%%% From RFC 2616,
+%%% Section 8.1.2.2:
+%%% Clients SHOULD NOT pipeline requests using non-idempotent methods or
+%%% non-idempotent sequences of methods (see section 9.1.2). Otherwise, a
+%%% premature termination of the transport connection could lead to
+%%% indeterminate results. A client wishing to send a non-idempotent
+%%% request SHOULD wait to send that request until it has received the
+%%% response status for the previous request.
+%%% Section 9.1.2:
+%%% Methods can also have the property of "idempotence" in that (aside
+%%% from error or expiration issues) the side-effects of N > 0 identical
+%%% requests is the same as for a single request. The methods GET, HEAD,
+%%% PUT and DELETE share this property. Also, the methods OPTIONS and
+%%% TRACE SHOULD NOT have side effects, and so are inherently idempotent.
+%%%
+%%% Note that POST and CONNECT are not idempotent methods.
+%%%
+%%% Tries to find an open, free session in STab. Such a session has a quelength
+%%% less than its max_quelength.
+%%% Don't care about non-standard, user defined methods.
+%%%
+%%% Returns {ok,Session} or need_new_session where
+%%% Session is the session that may be used
+lookup_session_entry(STab) ->
+ MS=[{#session{quelength='$1',max_quelength='$2',
+ id='_',clientclose='_',socket='$3',scheme='_',pipeline='_'},
+ [{'<','$1','$2'},{is_port,'$3'}],
+ ['$_']}],
+ case ets:select(STab,MS) of
+ [] ->
+ need_new_session;
+ SessionList -> % Now check if any of these has an empty pipeline.
+ case lists:keysearch(0,2,SessionList) of
+ {value,Session} ->
+ {ok,Session};
+ false ->
+ {ok,hd(SessionList)}
+ end
+ end.
+
+
+%%% Returns a tuple {Reply,State} where
+%%% Reply is the response sent back to the application
+%%%
+%%% Note:
+%%% - An {error,einval} from a send should sometimes rather be {error,closed}
+%%% - Don't close the session from here, let httpc_handler take care of that.
+%old_session_request(Session,Req,STab,State)
+% when (Req#request.settings)#client_settings.max_quelength==0 ->
+% Session1=Session#session{pipeline=[Req]},
+% ets:insert(STab,Session1),
+% {reply,{ok,ReqId},State#state{reqid=ReqId+1}};
+old_session_request(Session,Req,STab,State) ->
+ ReqId=State#state.reqid,
+ Req1=Req#request{id=ReqId},
+ case catch httpc_handler:http_request(Req1,Session#session.socket) of
+ ok ->
+ Session1=Session#session{pipeline=[Req1|Session#session.pipeline],
+ quelength=Session#session.quelength+1},
+ ets:insert(STab,Session1),
+ {reply,{ok,ReqId},State#state{reqid=ReqId+1}};
+ {error,Reason} ->
+ ets:insert(STab,Session#session{socket=undefined}),
+% http_lib:close(Session#session.sockettype,Session#session.socket),
+ {reply,{error,Reason},State#state{reqid=ReqId+1}}
+ end.
+
+%%% Returns a tuple {Reply,Sid,State} where
+%%% Reply is the response sent back to the application, and
+%%% Sid is the last used Session Id
+persistent_new_session_request(Sid,Req,STab,State) ->
+ ReqId=State#state.reqid,
+ case setup_new_session(Req#request{id=ReqId},false,Sid) of
+ {error,Reason} ->
+ {{error,Reason},State#state{reqid=ReqId+1}};
+ {NewSid,Session} ->
+ ets:insert(STab,Session),
+ {{ok,ReqId},NewSid,State#state{reqid=ReqId+1}}
+ end.
+
+%%% Returns a tuple {Reply,State} where
+%%% Reply is the response sent back to the application
+not_persistent_new_session_request(Req,State) ->
+ ReqId=State#state.reqid,
+ case setup_new_session(Req#request{id=ReqId},true,undefined) of
+ {error,Reason} ->
+ {{error,Reason},State#state{reqid=ReqId+1}};
+ ok ->
+ {{ok,ReqId},State#state{reqid=ReqId+1}}
+ end.
+
+%%% As there are no sessions available, set up a new session and send the request
+%%% on it.
+setup_new_session(Req,ClientClose,Sid) ->
+ S=#session{id=Sid,clientclose=ClientClose,
+ scheme=Req#request.scheme,
+ max_quelength=(Req#request.settings)#client_settings.max_quelength},
+ spawn_link(httpc_handler,init_connection,[Req,S]),
+ case ClientClose of
+ false ->
+ {Sid+1,S};
+ true ->
+ ok
+ end.
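+
+%%% Illustration (added for clarity; not part of the original module): for a
+%%% persistent session (ClientClose==false) the caller gets {Sid+1,Session}
+%%% and stores the new #session{} in its session table, whereas for a
+%%% non-persistent session the spawned handler is not tracked and the call
+%%% simply returns ok.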
+
+
+%%% ----------------------------------------------------------------------------
+%%% Abort all requests in the request queue.
+abort_request_que([],_Msg) ->
+ ok;
+abort_request_que([#request{from=From,ref=Ref,id=Id}|Que],Msg) ->
+ gen_server:cast(From,{Ref,Id,Msg}),
+ abort_request_que(Que,Msg);
+abort_request_que(#request{from=From,ref=Ref,id=Id},Msg) ->
+ gen_server:cast(From,{Ref,Id,Msg}).
+
+
+%%% --------------------------------
+% C={httpc_manager,{?MODULE,start_link,[]},permanent,1000,
+% worker,[?MODULE]},
+% supervisor:start_child(inets_sup, C),
+ensure_started() ->
+ case whereis(?HMANAME) of
+ undefined ->
+ start_link();
+ _ ->
+ ok
+ end.
+
+
+%%% ============================================================================
+%%% This is deprecated code, to be removed
+
+% format_time() ->
+% {_,_,MicroSecs}=TS=now(),
+% {{Y,Mon,D},{H,M,S}}=calendar:now_to_universal_time(TS),
+% lists:flatten(io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w,~2.2.0w:~2.2.0w:~6.3.0f",
+% [Y,Mon,D,H,M,S+(MicroSecs/1000000)])).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.erl
new file mode 100644
index 0000000000..8cc1c133e9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.erl
@@ -0,0 +1,596 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+-module(httpd).
+-export([multi_start/1, multi_start_link/1,
+ start/0, start/1, start/2,
+ start_link/0, start_link/1, start_link/2,
+ start_child/0,start_child/1,
+ multi_stop/1,
+ stop/0,stop/1,stop/2,
+ stop_child/0,stop_child/1,stop_child/2,
+ multi_restart/1,
+ restart/0,restart/1,restart/2,
+ parse_query/1]).
+
+%% Optional start related stuff...
+-export([load/1, load_mime_types/1,
+ start2/1, start2/2,
+ start_link2/1, start_link2/2,
+ stop2/1]).
+
+%% Management stuff
+-export([block/0,block/1,block/2,block/3,block/4,
+ unblock/0,unblock/1,unblock/2]).
+
+%% Debugging and status info stuff...
+-export([verbosity/3,verbosity/4]).
+-export([get_status/1,get_status/2,get_status/3,
+ get_admin_state/0,get_admin_state/1,get_admin_state/2,
+ get_usage_state/0,get_usage_state/1,get_usage_state/2]).
+
+-include("httpd.hrl").
+
+-define(D(F, A), io:format("~p:" ++ F ++ "~n", [?MODULE|A])).
+
+
+%% start
+
+start() ->
+ start("/var/tmp/server_root/conf/8888.conf").
+
+start(ConfigFile) ->
+ %% ?D("start(~s) -> entry", [ConfigFile]),
+ start(ConfigFile, []).
+
+start(ConfigFile, Verbosity) when list(ConfigFile), list(Verbosity) ->
+ httpd_sup:start(ConfigFile, Verbosity).
+
+
+%% start_link
+
+start_link() ->
+ start("/var/tmp/server_root/conf/8888.conf").
+
+start_link(ConfigFile) ->
+ start_link(ConfigFile, []).
+
+start_link(ConfigFile, Verbosity) when list(ConfigFile), list(Verbosity) ->
+ httpd_sup:start_link(ConfigFile, Verbosity).
+
+
+%% start2 & start_link2
+
+start2(Config) ->
+ start2(Config, []).
+
+start2(Config, Verbosity) when list(Config), list(Verbosity) ->
+ httpd_sup:start2(Config, Verbosity).
+
+start_link2(Config) ->
+ start_link2(Config, []).
+
+start_link2(Config, Verbosity) when list(Config), list(Verbosity) ->
+ httpd_sup:start_link2(Config, Verbosity).
+
+
+%% stop
+
+stop() ->
+ stop(8888).
+
+stop(Port) when integer(Port) ->
+ stop(undefined, Port);
+stop(Pid) when pid(Pid) ->
+ httpd_sup:stop(Pid);
+stop(ConfigFile) when list(ConfigFile) ->
+ %% ?D("stop(~s) -> entry", [ConfigFile]),
+ httpd_sup:stop(ConfigFile).
+
+stop(Addr, Port) when integer(Port) ->
+ httpd_sup:stop(Addr, Port).
+
+stop2(Config) when list(Config) ->
+ httpd_sup:stop2(Config).
+
+%% start_child
+
+start_child() ->
+ start_child("/var/tmp/server_root/conf/8888.conf").
+
+start_child(ConfigFile) ->
+ start_child(ConfigFile, []).
+
+start_child(ConfigFile, Verbosity) ->
+ inets_sup:start_child(ConfigFile, Verbosity).
+
+
+%% stop_child
+
+stop_child() ->
+ stop_child(8888).
+
+stop_child(Port) ->
+ stop_child(undefined,Port).
+
+stop_child(Addr, Port) when integer(Port) ->
+ inets_sup:stop_child(Addr, Port).
+
+
+%% multi_start
+
+multi_start(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mstart(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mstart(ConfigFiles) ->
+ mstart(ConfigFiles,[]).
+mstart([],Results) ->
+ {ok,lists:reverse(Results)};
+mstart([H|T],Results) ->
+ Res = start(H),
+ mstart(T,[Res|Results]).
+
+
+%% multi_start_link
+
+multi_start_link(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mstart_link(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mstart_link(ConfigFiles) ->
+ mstart_link(ConfigFiles,[]).
+mstart_link([],Results) ->
+ {ok,lists:reverse(Results)};
+mstart_link([H|T],Results) ->
+ Res = start_link(H),
+ mstart_link(T,[Res|Results]).
+
+
+%% multi_stop
+
+multi_stop(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mstop(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mstop(ConfigFiles) ->
+ mstop(ConfigFiles,[]).
+mstop([],Results) ->
+ {ok,lists:reverse(Results)};
+mstop([H|T],Results) ->
+ Res = stop(H),
+ mstop(T,[Res|Results]).
+
+
+%% multi_restart
+
+multi_restart(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mrestart(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mrestart(ConfigFiles) ->
+ mrestart(ConfigFiles,[]).
+mrestart([],Results) ->
+ {ok,lists:reverse(Results)};
+mrestart([H|T],Results) ->
+ Res = restart(H),
+ mrestart(T,[Res|Results]).
+
+
+%% restart
+
+restart() -> restart(undefined,8888).
+
+restart(Port) when integer(Port) ->
+ restart(undefined,Port);
+restart(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ restart(Addr,Port);
+ Error ->
+ Error
+ end.
+
+
+restart(Addr,Port) when integer(Port) ->
+ do_restart(Addr,Port).
+
+do_restart(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:restart(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+%%% =========================================================
+%%% Function: block/0, block/1, block/2, block/3, block/4
+%%% block()
+%%% block(Port)
+%%% block(ConfigFile)
+%%% block(Addr,Port)
+%%% block(Port,Mode)
+%%% block(ConfigFile,Mode)
+%%% block(Addr,Port,Mode)
+%%% block(ConfigFile,Mode,Timeout)
+%%% block(Addr,Port,Mode,Timeout)
+%%%
+%%% Returns: ok | {error,Reason}
+%%%
+%%% Description: This function is used to block an HTTP server.
+%%% The blocking can be done in two ways,
+%%% disturbing or non-disturbing. Default is disturbing.
+%%% When an HTTP server is blocked, all requests are rejected
+%%% (status code 503).
+%%%
+%%% disturbing:
+%%% By performing a disturbing block, the server
+%%% is blocked forcefully and all ongoing requests
+%%% are terminated. No new connections are accepted.
+%%% If a timeout is given, ongoing requests
+%%% are given this much time to complete before the
+%%% server is forcefully blocked. In this case no new
+%%% connections are accepted.
+%%%
+%%% non-disturbing:
+%%% A non-disturbing block is more graceful. No
+%%% new connections are accepted, but the ongoing
+%%% requests are allowed to complete.
+%%% If a timeout is given, it waits this long before
+%%% giving up (the block operation is aborted and the
+%%% server state is once more not blocked).
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% ConfigFile -> string()
+%%% Mode -> disturbing | non_disturbing
+%%% Timeout -> integer()
+%%%
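+%%% Example (added for clarity; uses the default port and the configuration
+%%% file path already shown in this module):
+%%%
+%%%   httpd:block(undefined, 8888, non_disturbing, 60000)
+%%%   httpd:block("/var/tmp/server_root/conf/8888.conf", disturbing)
+%%%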
+block() -> block(undefined,8888,disturbing).
+
+block(Port) when integer(Port) ->
+ block(undefined,Port,disturbing);
+
+block(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ block(Addr,Port,disturbing);
+ Error ->
+ Error
+ end.
+
+block(Addr,Port) when integer(Port) ->
+ block(Addr,Port,disturbing);
+
+block(Port,Mode) when integer(Port), atom(Mode) ->
+ block(undefined,Port,Mode);
+
+block(ConfigFile,Mode) when list(ConfigFile), atom(Mode) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ block(Addr,Port,Mode);
+ Error ->
+ Error
+ end.
+
+
+block(Addr,Port,disturbing) when integer(Port) ->
+ do_block(Addr,Port,disturbing);
+block(Addr,Port,non_disturbing) when integer(Port) ->
+ do_block(Addr,Port,non_disturbing);
+
+block(ConfigFile,Mode,Timeout) when list(ConfigFile), atom(Mode), integer(Timeout) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ block(Addr,Port,Mode,Timeout);
+ Error ->
+ Error
+ end.
+
+
+block(Addr,Port,non_disturbing,Timeout) when integer(Port), integer(Timeout) ->
+ do_block(Addr,Port,non_disturbing,Timeout);
+block(Addr,Port,disturbing,Timeout) when integer(Port), integer(Timeout) ->
+ do_block(Addr,Port,disturbing,Timeout).
+
+do_block(Addr,Port,Mode) when integer(Port), atom(Mode) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:block(Pid,Mode);
+ _ ->
+ {error,not_started}
+ end.
+
+
+do_block(Addr,Port,Mode,Timeout) when integer(Port), atom(Mode) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:block(Pid,Mode,Timeout);
+ _ ->
+ {error,not_started}
+ end.
+
+
+%%% =========================================================
+%%% Function: unblock/0, unblock/1, unblock/2
+%%% unblock()
+%%% unblock(Port)
+%%% unblock(ConfigFile)
+%%% unblock(Addr,Port)
+%%%
+%%% Description: This function is used to reverse a previous block
+%%% operation on the HTTP server.
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% ConfigFile -> string()
+%%%
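+%%% Illustrative call (editorial sketch; the port is invented):
+%%%
+%%%   httpd:unblock(8888).   %% lift a block set up as shown above
+%%%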
+unblock() -> unblock(undefined,8888).
+unblock(Port) when integer(Port) -> unblock(undefined,Port);
+
+unblock(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ unblock(Addr,Port);
+ Error ->
+ Error
+ end.
+
+unblock(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:unblock(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+verbosity(Port,Who,Verbosity) ->
+ verbosity(undefined,Port,Who,Verbosity).
+
+verbosity(Addr,Port,Who,Verbosity) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:verbosity(Pid,Who,Verbosity);
+ _ ->
+ not_started
+ end.
+
+
+%%% =========================================================
+%%% Function: get_admin_state/0, get_admin_state/1, get_admin_state/2
+%%% get_admin_state()
+%%% get_admin_state(Port)
+%%% get_admin_state(Addr,Port)
+%%%
+%%% Returns: {ok,State} | {error,Reason}
+%%%
+%%% Description: This function is used to retrieve the administrative
+%%% state of the HTTP server.
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% State -> unblocked | shutting_down | blocked
+%%% Reason -> term()
+%%%
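+%%% Illustrative call (editorial sketch; the port is invented):
+%%%
+%%%   httpd:get_admin_state(8888).  %% expected: unblocked | shutting_down | blocked
+%%%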
+get_admin_state() -> get_admin_state(undefined,8888).
+get_admin_state(Port) when integer(Port) -> get_admin_state(undefined,Port);
+
+get_admin_state(ConfigFile) when list(ConfigFile) ->
+    case get_addr_and_port(ConfigFile) of
+	{ok,Addr,Port} ->
+	    get_admin_state(Addr,Port);
+	Error ->
+	    Error
+    end.
+
+get_admin_state(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:get_admin_state(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+
+%%% =========================================================
+%%% Function: get_usage_state/0, get_usage_state/1, get_usage_state/2
+%%% get_usage_state()
+%%% get_usage_state(Port)
+%%% get_usage_state(Addr,Port)
+%%%
+%%% Returns: {ok,State} | {error,Reason}
+%%%
+%%% Description: This function is used to retrieve the usage
+%%% state of the HTTP server.
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% State -> idle | active | busy
+%%% Reason -> term()
+%%%
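+%%% Illustrative call (editorial sketch; the port is invented):
+%%%
+%%%   httpd:get_usage_state(8888).  %% expected: idle | active | busy
+%%%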
+get_usage_state() -> get_usage_state(undefined,8888).
+get_usage_state(Port) when integer(Port) -> get_usage_state(undefined,Port);
+
+get_usage_state(ConfigFile) when list(ConfigFile) ->
+    case get_addr_and_port(ConfigFile) of
+	{ok,Addr,Port} ->
+	    get_usage_state(Addr,Port);
+	Error ->
+	    Error
+    end.
+
+get_usage_state(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:get_usage_state(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+
+%%% =========================================================
+%% Function: get_status(ConfigFile) -> Status
+%% get_status(Port) -> Status
+%% get_status(Addr,Port) -> Status
+%% get_status(Port,Timeout) -> Status
+%% get_status(Addr,Port,Timeout) -> Status
+%%
+%% Arguments: ConfigFile -> string()
+%% Configuration file from which Port and
+%% BindAddress will be extracted.
+%% Addr -> {A,B,C,D} | string()
+%% Bind Address of the http server
+%% Port -> integer()
+%% Port number of the http server
+%% Timeout -> integer()
+%% Timeout time for the call
+%%
+%% Returns: Status -> list()
+%%
+%% Description: This function is used when the caller runs in the
+%%              same node as the HTTP server, or when calling from a
+%%              program such as erl_call (see erl_interface).
+%%
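+%% Illustrative calls (editorial sketch; the address, port and timeout
+%% values are invented for the example):
+%%
+%%   httpd:get_status(8888),                   %% default 5000 ms timeout
+%%   httpd:get_status("127.0.0.1", 8888, 1000).
+%%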
+
+get_status(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ get_status(Addr,Port);
+ Error ->
+ Error
+ end;
+
+get_status(Port) when integer(Port) ->
+ get_status(undefined,Port,5000).
+
+get_status(Port,Timeout) when integer(Port), integer(Timeout) ->
+ get_status(undefined,Port,Timeout);
+
+get_status(Addr,Port) when list(Addr), integer(Port) ->
+ get_status(Addr,Port,5000).
+
+get_status(Addr,Port,Timeout) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:get_status(Pid,Timeout);
+ _ ->
+ not_started
+ end.
+
+
+%% load config
+
+load(ConfigFile) ->
+ httpd_conf:load(ConfigFile).
+
+load_mime_types(MimeTypesFile) ->
+ httpd_conf:load_mime_types(MimeTypesFile).
+
+
+%% parse_query
+
+parse_query(String) ->
+ {ok, SplitString} = regexp:split(String,"[&;]"),
+ foreach(SplitString).
+
+foreach([]) ->
+ [];
+foreach([KeyValue|Rest]) ->
+ {ok, Plus2Space, _} = regexp:gsub(KeyValue,"[\+]"," "),
+ case regexp:split(Plus2Space,"=") of
+ {ok,[Key|Value]} ->
+ [{httpd_util:decode_hex(Key),
+ httpd_util:decode_hex(lists:flatten(Value))}|foreach(Rest)];
+ {ok,_} ->
+ foreach(Rest)
+ end.
+
+
+%% get_addr_and_port
+
+get_addr_and_port(ConfigFile) ->
+ case httpd_conf:load(ConfigFile) of
+ {ok,ConfigList} ->
+ Port = httpd_util:key1search(ConfigList,port,80),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ {ok,Addr,Port};
+ Error ->
+ Error
+ end.
+
+
+%% make_name
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd",Addr,Port).
+
+
+%% Multi stuff
+%%
+
+read_multi_file(File) ->
+ read_mfile(file:open(File,[read])).
+
+read_mfile({ok,Fd}) ->
+ read_mfile(read_line(Fd),Fd,[]);
+read_mfile(Error) ->
+ Error.
+
+read_mfile(eof,_Fd,SoFar) ->
+ {ok,lists:reverse(SoFar)};
+read_mfile({error,Reason},_Fd,SoFar) ->
+ {error,Reason};
+read_mfile([$#|Comment],Fd,SoFar) ->
+ read_mfile(read_line(Fd),Fd,SoFar);
+read_mfile([],Fd,SoFar) ->
+ read_mfile(read_line(Fd),Fd,SoFar);
+read_mfile(Line,Fd,SoFar) ->
+ read_mfile(read_line(Fd),Fd,[Line|SoFar]).
+
+read_line(Fd) -> read_line1(io:get_line(Fd,[])).
+read_line1(eof) -> eof;
+read_line1(String) -> httpd_conf:clean(String).
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.hrl
new file mode 100644
index 0000000000..ba21bdf638
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd.hrl
@@ -0,0 +1,77 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd.hrl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+
+-include_lib("kernel/include/file.hrl").
+
+-ifndef(SERVER_SOFTWARE).
+-define(SERVER_SOFTWARE,"inets/develop"). % Define in Makefile!
+-endif.
+-define(SERVER_PROTOCOL,"HTTP/1.1").
+-define(SOCKET_CHUNK_SIZE,8192).
+-define(SOCKET_MAX_POLL,25).
+-define(FILE_CHUNK_SIZE,64*1024).
+-define(NICE(Reason),lists:flatten(atom_to_list(?MODULE)++": "++Reason)).
+-define(DEFAULT_CONTEXT,
+ [{errmsg,"[an error occurred while processing this directive]"},
+ {timefmt,"%A, %d-%b-%y %T %Z"},
+ {sizefmt,"abbrev"}]).
+
+
+-ifdef(inets_error).
+-define(ERROR(Format, Args), io:format("E(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(ERROR(F,A),[]).
+-endif.
+
+-ifdef(inets_log).
+-define(LOG(Format, Args), io:format("L(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(LOG(F,A),[]).
+-endif.
+
+-ifdef(inets_debug).
+-define(DEBUG(Format, Args), io:format("D(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(DEBUG(F,A),[]).
+-endif.
+
+-ifdef(inets_cdebug).
+-define(CDEBUG(Format, Args), io:format("C(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(CDEBUG(F,A),[]).
+-endif.
+
+
+-record(init_data,{peername,resolve}).
+-record(mod,{init_data,
+ data=[],
+ socket_type=ip_comm,
+ socket,
+ config_db,
+ method,
+ absolute_uri=[],
+ request_uri,
+ http_version,
+ request_line,
+ parsed_header=[],
+ entity_body,
+ connection}).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor.erl
new file mode 100644
index 0000000000..9b88f84865
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor.erl
@@ -0,0 +1,176 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_acceptor.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+-module(httpd_acceptor).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+
+%% External API
+-export([start_link/6]).
+
+%% Other exports (for spawn's etc.)
+-export([acceptor/4, acceptor/7]).
+
+
+%%
+%% External API
+%%
+
+%% start_link
+
+start_link(Manager, SocketType, Addr, Port, ConfigDb, Verbosity) ->
+ Args = [self(), Manager, SocketType, Addr, Port, ConfigDb, Verbosity],
+ proc_lib:start_link(?MODULE, acceptor, Args).
+
+
+acceptor(Parent, Manager, SocketType, Addr, Port, ConfigDb, Verbosity) ->
+ put(sname,acc),
+ put(verbosity,Verbosity),
+ ?vlog("starting",[]),
+ case (catch do_init(SocketType, Addr, Port)) of
+ {ok, ListenSocket} ->
+ proc_lib:init_ack(Parent, {ok, self()}),
+ acceptor(Manager, SocketType, ListenSocket, ConfigDb);
+ Error ->
+ proc_lib:init_ack(Parent, Error),
+ error
+ end.
+
+do_init(SocketType, Addr, Port) ->
+ do_socket_start(SocketType),
+ ListenSocket = do_socket_listen(SocketType, Addr, Port),
+ {ok, ListenSocket}.
+
+
+do_socket_start(SocketType) ->
+ case httpd_socket:start(SocketType) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ ?vinfo("failed socket start: ~p",[Reason]),
+ throw({error, {socket_start_failed, Reason}})
+ end.
+
+
+do_socket_listen(SocketType, Addr, Port) ->
+ case httpd_socket:listen(SocketType, Addr, Port) of
+ {error, Reason} ->
+ ?vinfo("failed socket listen operation: ~p", [Reason]),
+ throw({error, {listen, Reason}});
+ ListenSocket ->
+ ListenSocket
+ end.
+
+
+%% acceptor
+
+acceptor(Manager, SocketType, ListenSocket, ConfigDb) ->
+ ?vdebug("await connection",[]),
+ case (catch httpd_socket:accept(SocketType, ListenSocket, 30000)) of
+ {error, Reason} ->
+ handle_error(Reason, ConfigDb, SocketType),
+ ?MODULE:acceptor(Manager, SocketType, ListenSocket, ConfigDb);
+
+ {'EXIT', Reason} ->
+ handle_error({'EXIT', Reason}, ConfigDb, SocketType),
+ ?MODULE:acceptor(Manager, SocketType, ListenSocket, ConfigDb);
+
+ Socket ->
+ handle_connection(Manager, ConfigDb, SocketType, Socket),
+ ?MODULE:acceptor(Manager, SocketType, ListenSocket, ConfigDb)
+ end.
+
+
+handle_connection(Manager, ConfigDb, SocketType, Socket) ->
+ case httpd_request_handler:start_link(Manager, ConfigDb) of
+ {ok, Pid} ->
+ httpd_socket:controlling_process(SocketType, Socket, Pid),
+ httpd_request_handler:synchronize(Pid, SocketType, Socket);
+ {error, Reason} ->
+ handle_connection_err(SocketType, Socket, ConfigDb, Reason)
+ end.
+
+
+handle_connection_err(SocketType, Socket, ConfigDb, Reason) ->
+ String =
+ lists:flatten(
+ io_lib:format("failed starting request handler:~n ~p", [Reason])),
+ report_error(ConfigDb, String),
+ httpd_socket:close(SocketType, Socket).
+
+
+handle_error(timeout, _, _) ->
+ ?vtrace("Accept timeout",[]),
+ ok;
+
+handle_error({enfile, _}, _, _) ->
+ ?vinfo("Accept error: enfile",[]),
+ %% Out of sockets...
+ sleep(200);
+
+handle_error(emfile, _, _) ->
+ ?vinfo("Accept error: emfile",[]),
+ %% Too many open files -> Out of sockets...
+ sleep(200);
+
+handle_error(closed, _, _) ->
+ ?vlog("Accept error: closed",[]),
+    %% This probably only means that the application is stopping,
+    %% but just in case
+ exit(closed);
+
+handle_error(econnaborted, _, _) ->
+ ?vlog("Accept aborted",[]),
+ ok;
+
+handle_error(esslaccept, _, _) ->
+    %% The user has selected to cancel the installation of
+    %% the certificate. This is not a real error, so we do
+    %% not write an error message.
+ ok;
+
+handle_error({'EXIT', Reason}, ConfigDb, SocketType) ->
+ ?vinfo("Accept exit:~n ~p",[Reason]),
+ String = lists:flatten(io_lib:format("Accept exit: ~p", [Reason])),
+ accept_failed(SocketType, ConfigDb, String);
+
+handle_error(Reason, ConfigDb, SocketType) ->
+ ?vinfo("Accept error:~n ~p",[Reason]),
+ String = lists:flatten(io_lib:format("Accept error: ~p", [Reason])),
+ accept_failed(SocketType, ConfigDb, String).
+
+
+accept_failed(SocketType, ConfigDb, String) ->
+ error_logger:error_report(String),
+ mod_log:error_log(SocketType, undefined, ConfigDb,
+ {0, "unknown"}, String),
+ mod_disk_log:error_log(SocketType, undefined, ConfigDb,
+ {0, "unknown"}, String),
+ exit({accept_failed, String}).
+
+
+report_error(Db, String) ->
+ error_logger:error_report(String),
+ mod_log:report_error(Db, String),
+ mod_disk_log:report_error(Db, String).
+
+
+sleep(T) -> receive after T -> ok end.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor_sup.erl
new file mode 100644
index 0000000000..e408614f1c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_acceptor_sup.erl
@@ -0,0 +1,118 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_acceptor_sup.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The supervisor for the httpd acceptor processes
+%%----------------------------------------------------------------------
+
+-module(httpd_acceptor_sup).
+
+-behaviour(supervisor).
+
+-include("httpd_verbosity.hrl").
+
+%% public
+-export([start/3, stop/1, init/1]).
+
+-export([start_acceptor/4, stop_acceptor/2]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% supervisor callback functions
+
+
+start(Addr, Port, AccSupVerbosity) ->
+ SupName = make_name(Addr, Port),
+ supervisor:start_link({local, SupName}, ?MODULE, [AccSupVerbosity]).
+
+stop(StartArgs) ->
+ ok.
+
+init([Verbosity]) -> % Supervisor
+ do_init(Verbosity);
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+do_init(Verbosity) ->
+ put(verbosity,?vvalidate(Verbosity)),
+ put(sname,acc_sup),
+ ?vlog("starting", []),
+ Flags = {one_for_one, 500, 100},
+ KillAfter = timer:seconds(1),
+ Workers = [],
+ {ok, {Flags, Workers}}.
+
+
+%%----------------------------------------------------------------------
+%% Function:    start_acceptor/4, stop_acceptor/2
+%% Description: Starts or stops an acceptor worker (child) process
+%%----------------------------------------------------------------------
+
+start_acceptor(SocketType, Addr, Port, ConfigDb) ->
+ Verbosity = get_acc_verbosity(),
+ start_worker(httpd_acceptor, SocketType, Addr, Port,
+ ConfigDb, Verbosity, self(), []).
+
+stop_acceptor(Addr, Port) ->
+ stop_worker(httpd_acceptor, Addr, Port).
+
+
+%%----------------------------------------------------------------------
+%% Function:    start_worker/8
+%% Description: Starts a (permanent) worker (child) process
+%%----------------------------------------------------------------------
+
+start_worker(M, SocketType, Addr, Port, ConfigDB, Verbosity, Manager,
+ Modules) ->
+ SupName = make_name(Addr, Port),
+ Args = [Manager, SocketType, Addr, Port, ConfigDB, Verbosity],
+ Spec = {{M, Addr, Port},
+ {M, start_link, Args},
+ permanent, timer:seconds(1), worker, [M] ++ Modules},
+ supervisor:start_child(SupName, Spec).
+
+
+%%----------------------------------------------------------------------
+%% Function:    stop_worker/3
+%% Description: Stops a permanent worker (child) process
+%%----------------------------------------------------------------------
+
+stop_worker(M, Addr, Port) ->
+ SupName = make_name(Addr, Port),
+ Name = {M, Addr, Port},
+ case supervisor:terminate_child(SupName, Name) of
+ ok ->
+ supervisor:delete_child(SupName, Name);
+ Error ->
+ Error
+ end.
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_acc_sup",Addr,Port).
+
+
+
+get_acc_verbosity() ->
+ get_verbosity(get(acceptor_verbosity)).
+
+get_verbosity(undefined) ->
+ ?default_verbosity;
+get_verbosity(V) ->
+ ?vvalidate(V).
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_conf.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_conf.erl
new file mode 100644
index 0000000000..2c7a747d42
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_conf.erl
@@ -0,0 +1,688 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_conf.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+-module(httpd_conf).
+-export([load/1, load_mime_types/1,
+ load/2, store/1, store/2,
+ remove_all/1, remove/1,
+ is_directory/1, is_file/1,
+ make_integer/1, clean/1, custom_clean/3, check_enum/2]).
+
+
+-define(VMODULE,"CONF").
+-include("httpd_verbosity.hrl").
+
+%% The configuration data is handled in three (3) phases:
+%% 1. Parse the config file and put all directives into a key-value
+%%    tuple list (load/1).
+%% 2. Traverse the key-value tuple list and store it in an ETS table.
+%% Directives depending on other directives are taken care of here
+%% (store/1).
+%% 3. Traverse the ETS table and do a complete clean-up (remove/1).
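+%%
+%% A minimal end-to-end sketch of the three phases (editorial note; the
+%% config file path is invented for the example):
+%%
+%%   {ok, ConfigList} = httpd_conf:load("conf/httpd.conf"),  %% phase 1
+%%   {ok, ConfigDB}   = httpd_conf:store(ConfigList),        %% phase 2
+%%   ok               = httpd_conf:remove_all(ConfigDB).     %% phase 3 (clean-up)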
+
+-include("httpd.hrl").
+
+%%
+%% Phase 1: Load
+%%
+
+%% load
+
+load(ConfigFile) ->
+ ?CDEBUG("load -> ConfigFile: ~p",[ConfigFile]),
+ case read_config_file(ConfigFile) of
+ {ok, Config} ->
+ case bootstrap(Config) of
+ {error, Reason} ->
+ {error, Reason};
+ {ok, Modules} ->
+ load_config(Config, lists:append(Modules, [?MODULE]))
+ end;
+ {error, Reason} ->
+ {error, ?NICE("Error while reading config file: "++Reason)}
+ end.
+
+
+bootstrap([]) ->
+ {error, ?NICE("Modules must be specified in the config file")};
+bootstrap([Line|Config]) ->
+ case Line of
+ [$M,$o,$d,$u,$l,$e,$s,$ |Modules] ->
+ {ok, ModuleList} = regexp:split(Modules," "),
+ TheMods = [list_to_atom(X) || X <- ModuleList],
+ case verify_modules(TheMods) of
+ ok ->
+ {ok, TheMods};
+ {error, Reason} ->
+ ?ERROR("bootstrap -> : validation failed: ~p",[Reason]),
+ {error, Reason}
+ end;
+ _ ->
+ bootstrap(Config)
+ end.
+
+
+%%
+%% verify_modules/1 -> ok | {error, Reason}
+%%
+%% Verifies that all specified modules are available.
+%%
+verify_modules([]) ->
+ ok;
+verify_modules([Mod|Rest]) ->
+ case code:which(Mod) of
+ non_existing ->
+ {error, ?NICE(atom_to_list(Mod)++" does not exist")};
+ Path ->
+ verify_modules(Rest)
+ end.
+
+%%
+%% read_config_file/1 -> {ok, [line(), line()..]} | {error, Reason}
+%%
+%% Reads the entire configuration file and returns a list of strings
+%% or an error.
+%%
+
+
+read_config_file(FileName) ->
+ case file:open(FileName, [read]) of
+ {ok, Stream} ->
+ read_config_file(Stream, []);
+ {error, Reason} ->
+ {error, ?NICE("Cannot open "++FileName)}
+ end.
+
+read_config_file(Stream, SoFar) ->
+ case io:get_line(Stream, []) of
+ eof ->
+ {ok, lists:reverse(SoFar)};
+ {error, Reason} ->
+ {error, Reason};
+ [$#|Rest] ->
+ %% Ignore commented lines for efficiency later ..
+ read_config_file(Stream, SoFar);
+ Line ->
+ {ok, NewLine, _}=regexp:sub(clean(Line),"[\t\r\f ]"," "),
+ case NewLine of
+ [] ->
+ %% Also ignore empty lines ..
+ read_config_file(Stream, SoFar);
+ Other ->
+ read_config_file(Stream, [NewLine|SoFar])
+ end
+ end.
+
+is_exported(Module, ToFind) ->
+ Exports = Module:module_info(exports),
+ lists:member(ToFind, Exports).
+
+%%
+%% load/4 -> {ok, ConfigList} | {error, Reason}
+%%
+%% This loads the config file into each module specified by Modules
+%% Each module has its own context that is passed to and (optionally)
+%% returned by the modules load function. The module can also return
+%% a ConfigEntry, which will be added to the global configuration
+%% list.
+%% All configuration directives are guaranteed to be passed to all
+%% modules. Each module only implements the function clauses of
+%% the load function for the configuration directives it supports;
+%% it is ok if an apply returns {'EXIT', {function_clause, ..}}.
+%%
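+%% Editorial sketch of the load/2 callback a module is expected to export
+%% (the directive name below is invented):
+%%
+%%   load("ExampleDirective " ++ Value, Context) ->
+%%       {ok, Context, {example_directive, httpd_conf:clean(Value)}}.
+%%
+%% Directives a module does not recognise exit with function_clause,
+%% which load_traverse/6 below treats as "not handled by this module".
+%%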
+load_config(Config, Modules) ->
+ %% Create default contexts for all modules
+ Contexts = lists:duplicate(length(Modules), []),
+ load_config(Config, Modules, Contexts, []).
+
+
+load_config([], _Modules, _Contexts, ConfigList) ->
+ case a_must(ConfigList, [server_name,port,server_root,document_root]) of
+ ok ->
+ {ok, ConfigList};
+ {missing, Directive} ->
+ {error, ?NICE(atom_to_list(Directive)++
+ " must be specified in the config file")}
+ end;
+
+load_config([Line|Config], Modules, Contexts, ConfigList) ->
+ ?CDEBUG("load_config -> Line: ~p",[Line]),
+ case load_traverse(Line, Contexts, Modules, [], ConfigList, no) of
+ {ok, NewContexts, NewConfigList} ->
+ load_config(Config, Modules, NewContexts, NewConfigList);
+ {error, Reason} ->
+ ?ERROR("load_config -> traverse failed: ~p",[Reason]),
+ {error, Reason}
+ end.
+
+
+load_traverse(Line, [], [], NewContexts, ConfigList, no) ->
+ ?CDEBUG("load_traverse/no -> ~n"
+ " Line: ~p~n"
+ " NewContexts: ~p~n"
+ " ConfigList: ~p",
+ [Line,NewContexts,ConfigList]),
+ {error, ?NICE("Configuration directive not recognized: "++Line)};
+load_traverse(Line, [], [], NewContexts, ConfigList, yes) ->
+ ?CDEBUG("load_traverse/yes -> ~n"
+ " Line: ~p~n"
+ " NewContexts: ~p~n"
+ " ConfigList: ~p",
+ [Line,NewContexts,ConfigList]),
+ {ok, lists:reverse(NewContexts), ConfigList};
+load_traverse(Line, [Context|Contexts], [Module|Modules], NewContexts, ConfigList, State) ->
+ ?CDEBUG("load_traverse/~p -> ~n"
+ " Line: ~p~n"
+ " Module: ~p~n"
+ " Context: ~p~n"
+ " Contexts: ~p~n"
+ " NewContexts: ~p",
+ [State,Line,Module,Context,Contexts,NewContexts]),
+ case is_exported(Module, {load, 2}) of
+ true ->
+ ?CDEBUG("load_traverse -> ~p:load/2 exported",[Module]),
+ case catch apply(Module, load, [Line, Context]) of
+ {'EXIT', {function_clause, _}} ->
+ ?CDEBUG("load_traverse -> exit: function_clause"
+ "~n Module: ~p"
+ "~n Line: ~s",[Module,Line]),
+ load_traverse(Line, Contexts, Modules, [Context|NewContexts], ConfigList, State);
+ {'EXIT', Reason} ->
+ ?CDEBUG("load_traverse -> exit: ~p",[Reason]),
+ error_logger:error_report({'EXIT', Reason}),
+ load_traverse(Line, Contexts, Modules, [Context|NewContexts], ConfigList, State);
+ {ok, NewContext} ->
+ ?CDEBUG("load_traverse -> ~n"
+ " NewContext: ~p",[NewContext]),
+ load_traverse(Line, Contexts, Modules, [NewContext|NewContexts], ConfigList,yes);
+ {ok, NewContext, ConfigEntry} when tuple(ConfigEntry) ->
+ ?CDEBUG("load_traverse (tuple) -> ~n"
+ " NewContext: ~p~n"
+ " ConfigEntry: ~p",[NewContext,ConfigEntry]),
+ load_traverse(Line, Contexts, Modules, [NewContext|NewContexts],
+ [ConfigEntry|ConfigList], yes);
+ {ok, NewContext, ConfigEntry} when list(ConfigEntry) ->
+ ?CDEBUG("load_traverse (list) -> ~n"
+ " NewContext: ~p~n"
+ " ConfigEntry: ~p",[NewContext,ConfigEntry]),
+ load_traverse(Line, Contexts, Modules, [NewContext|NewContexts],
+ lists:append(ConfigEntry, ConfigList), yes);
+ {error, Reason} ->
+ ?CDEBUG("load_traverse -> error: ~p",[Reason]),
+ {error, Reason}
+ end;
+ false ->
+ ?CDEBUG("load_traverse -> ~p:load/2 not exported",[Module]),
+ load_traverse(Line, Contexts, Modules, [Context|NewContexts],
+ ConfigList,yes)
+ end.
+
+
+load(eof, []) ->
+ eof;
+
+load([$M,$a,$x,$H,$e,$a,$d,$e,$r,$S,$i,$z,$e,$ |MaxHeaderSize], []) ->
+ ?DEBUG("load -> MaxHeaderSize: ~p",[MaxHeaderSize]),
+ case make_integer(MaxHeaderSize) of
+ {ok, Integer} ->
+ {ok, [], {max_header_size,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxHeaderSize)++
+ " is an invalid number of MaxHeaderSize")}
+ end;
+load([$M,$a,$x,$H,$e,$a,$d,$e,$r,$A,$c,$t,$i,$o,$n,$ |Action], []) ->
+ ?DEBUG("load -> MaxHeaderAction: ~p",[Action]),
+ {ok, [], {max_header_action,list_to_atom(clean(Action))}};
+load([$M,$a,$x,$B,$o,$d,$y,$S,$i,$z,$e,$ |MaxBodySize], []) ->
+ ?DEBUG("load -> MaxBodySize: ~p",[MaxBodySize]),
+ case make_integer(MaxBodySize) of
+ {ok, Integer} ->
+ {ok, [], {max_body_size,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxBodySize)++
+ " is an invalid number of MaxBodySize")}
+ end;
+load([$M,$a,$x,$B,$o,$d,$y,$A,$c,$t,$i,$o,$n,$ |Action], []) ->
+ ?DEBUG("load -> MaxBodyAction: ~p",[Action]),
+ {ok, [], {max_body_action,list_to_atom(clean(Action))}};
+load([$S,$e,$r,$v,$e,$r,$N,$a,$m,$e,$ |ServerName], []) ->
+ ?DEBUG("load -> ServerName: ~p",[ServerName]),
+ {ok,[],{server_name,clean(ServerName)}};
+load([$S,$o,$c,$k,$e,$t,$T,$y,$p,$e,$ |SocketType], []) ->
+ ?DEBUG("load -> SocketType: ~p",[SocketType]),
+ case check_enum(clean(SocketType),["ssl","ip_comm"]) of
+ {ok, ValidSocketType} ->
+ {ok, [], {com_type,ValidSocketType}};
+ {error,_} ->
+ {error, ?NICE(clean(SocketType) ++ " is an invalid SocketType")}
+ end;
+load([$P,$o,$r,$t,$ |Port], []) ->
+ ?DEBUG("load -> Port: ~p",[Port]),
+ case make_integer(Port) of
+ {ok, Integer} ->
+ {ok, [], {port,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(Port)++" is an invalid Port")}
+ end;
+load([$B,$i,$n,$d,$A,$d,$d,$r,$e,$s,$s,$ |Address], []) ->
+ ?DEBUG("load -> Address: ~p",[Address]),
+ case clean(Address) of
+ "*" ->
+ {ok, [], {bind_address,any}};
+ CAddress ->
+ ?CDEBUG("load -> CAddress: ~p",[CAddress]),
+ case inet:getaddr(CAddress,inet) of
+ {ok, IPAddr} ->
+ ?CDEBUG("load -> IPAddr: ~p",[IPAddr]),
+ {ok, [], {bind_address,IPAddr}};
+ {error, _} ->
+ {error, ?NICE(CAddress++" is an invalid address")}
+ end
+ end;
+load([$K,$e,$e,$p,$A,$l,$i,$v,$e,$ |OnorOff], []) ->
+ case list_to_atom(clean(OnorOff)) of
+ off ->
+ {ok, [], {persistent_conn, false}};
+ _ ->
+ {ok, [], {persistent_conn, true}}
+ end;
+load([$M,$a,$x,$K,$e,$e,$p,$A,$l,$i,$v,$e,$R,$e,$q,$u,$e,$s,$t,$ |MaxRequests], []) ->
+ case make_integer(MaxRequests) of
+ {ok, Integer} ->
+ {ok, [], {max_keep_alive_request, Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxRequests)++" is an invalid MaxKeepAliveRequest")}
+ end;
+load([$K,$e,$e,$p,$A,$l,$i,$v,$e,$T,$i,$m,$e,$o,$u,$t,$ |Timeout], []) ->
+ case make_integer(Timeout) of
+ {ok, Integer} ->
+ {ok, [], {keep_alive_timeout, Integer*1000}};
+ {error, _} ->
+ {error, ?NICE(clean(Timeout)++" is an invalid KeepAliveTimeout")}
+ end;
+load([$M,$o,$d,$u,$l,$e,$s,$ |Modules], []) ->
+ {ok, ModuleList} = regexp:split(Modules," "),
+ {ok, [], {modules,[list_to_atom(X) || X <- ModuleList]}};
+load([$S,$e,$r,$v,$e,$r,$A,$d,$m,$i,$n,$ |ServerAdmin], []) ->
+ {ok, [], {server_admin,clean(ServerAdmin)}};
+load([$S,$e,$r,$v,$e,$r,$R,$o,$o,$t,$ |ServerRoot], []) ->
+ case is_directory(clean(ServerRoot)) of
+ {ok, Directory} ->
+ MimeTypesFile =
+ filename:join([clean(ServerRoot),"conf", "mime.types"]),
+ case load_mime_types(MimeTypesFile) of
+ {ok, MimeTypesList} ->
+ {ok, [], [{server_root,string:strip(Directory,right,$/)},
+ {mime_types,MimeTypesList}]};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, _} ->
+ {error, ?NICE(clean(ServerRoot)++" is an invalid ServerRoot")}
+ end;
+load([$M,$a,$x,$C,$l,$i,$e,$n,$t,$s,$ |MaxClients], []) ->
+ ?DEBUG("load -> MaxClients: ~p",[MaxClients]),
+ case make_integer(MaxClients) of
+ {ok, Integer} ->
+ {ok, [], {max_clients,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxClients)++" is an invalid number of MaxClients")}
+ end;
+load([$D,$o,$c,$u,$m,$e,$n,$t,$R,$o,$o,$t,$ |DocumentRoot],[]) ->
+ case is_directory(clean(DocumentRoot)) of
+ {ok, Directory} ->
+ {ok, [], {document_root,string:strip(Directory,right,$/)}};
+ {error, _} ->
+	{error, ?NICE(clean(DocumentRoot)++" is an invalid DocumentRoot")}
+ end;
+load([$D,$e,$f,$a,$u,$l,$t,$T,$y,$p,$e,$ |DefaultType], []) ->
+ {ok, [], {default_type,clean(DefaultType)}};
+load([$S,$S,$L,$C,$e,$r,$t,$i,$f,$i,$c,$a,$t,$e,$F,$i,$l,$e,$ | SSLCertificateFile], []) ->
+ ?DEBUG("load -> SSLCertificateFile: ~p",[SSLCertificateFile]),
+ case is_file(clean(SSLCertificateFile)) of
+ {ok, File} ->
+ {ok, [], {ssl_certificate_file,File}};
+ {error, _} ->
+ {error, ?NICE(clean(SSLCertificateFile)++
+ " is an invalid SSLCertificateFile")}
+ end;
+load([$S,$S,$L,$C,$e,$r,$t,$i,$f,$i,$c,$a,$t,$e,$K,$e,$y,$F,$i,$l,$e,$ |
+ SSLCertificateKeyFile], []) ->
+ ?DEBUG("load -> SSLCertificateKeyFile: ~p",[SSLCertificateKeyFile]),
+ case is_file(clean(SSLCertificateKeyFile)) of
+ {ok, File} ->
+ {ok, [], {ssl_certificate_key_file,File}};
+ {error, _} ->
+ {error, ?NICE(clean(SSLCertificateKeyFile)++
+ " is an invalid SSLCertificateKeyFile")}
+ end;
+load([$S,$S,$L,$V,$e,$r,$i,$f,$y,$C,$l,$i,$e,$n,$t,$ |SSLVerifyClient], []) ->
+ ?DEBUG("load -> SSLVerifyClient: ~p",[SSLVerifyClient]),
+ case make_integer(clean(SSLVerifyClient)) of
+ {ok, Integer} when Integer >=0,Integer =< 2 ->
+ {ok, [], {ssl_verify_client,Integer}};
+ {ok, Integer} ->
+ {error,?NICE(clean(SSLVerifyClient)++" is an invalid SSLVerifyClient")};
+ {error, nomatch} ->
+ {error,?NICE(clean(SSLVerifyClient)++" is an invalid SSLVerifyClient")}
+ end;
+load([$S,$S,$L,$V,$e,$r,$i,$f,$y,$D,$e,$p,$t,$h,$ |
+ SSLVerifyDepth], []) ->
+ ?DEBUG("load -> SSLVerifyDepth: ~p",[SSLVerifyDepth]),
+ case make_integer(clean(SSLVerifyDepth)) of
+ {ok, Integer} when Integer > 0 ->
+ {ok, [], {ssl_verify_client_depth,Integer}};
+ {ok, Integer} ->
+ {error,?NICE(clean(SSLVerifyDepth) ++
+ " is an invalid SSLVerifyDepth")};
+ {error, nomatch} ->
+ {error,?NICE(clean(SSLVerifyDepth) ++
+ " is an invalid SSLVerifyDepth")}
+ end;
+load([$S,$S,$L,$C,$i,$p,$h,$e,$r,$s,$ | SSLCiphers], []) ->
+ ?DEBUG("load -> SSLCiphers: ~p",[SSLCiphers]),
+ {ok, [], {ssl_ciphers, clean(SSLCiphers)}};
+load([$S,$S,$L,$C,$A,$C,$e,$r,$t,$i,$f,$i,$c,$a,$t,$e,$F,$i,$l,$e,$ |
+ SSLCACertificateFile], []) ->
+ case is_file(clean(SSLCACertificateFile)) of
+ {ok, File} ->
+ {ok, [], {ssl_ca_certificate_file,File}};
+ {error, _} ->
+ {error, ?NICE(clean(SSLCACertificateFile)++
+ " is an invalid SSLCACertificateFile")}
+ end;
+load([$S,$S,$L,$P,$a,$s,$s,$w,$o,$r,$d,$C,$a,$l,$l,$b,$a,$c,$k,$M,$o,$d,$u,$l,$e,$ | SSLPasswordCallbackModule], []) ->
+ ?DEBUG("load -> SSLPasswordCallbackModule: ~p",
+ [SSLPasswordCallbackModule]),
+ {ok, [], {ssl_password_callback_module,
+ list_to_atom(clean(SSLPasswordCallbackModule))}};
+load([$S,$S,$L,$P,$a,$s,$s,$w,$o,$r,$d,$C,$a,$l,$l,$b,$a,$c,$k,$F,$u,$n,$c,$t,$i,$o,$n,$ | SSLPasswordCallbackFunction], []) ->
+ ?DEBUG("load -> SSLPasswordCallbackFunction: ~p",
+ [SSLPasswordCallbackFunction]),
+ {ok, [], {ssl_password_callback_function,
+ list_to_atom(clean(SSLPasswordCallbackFunction))}}.
+
+
+%%
+%% load_mime_types/1 -> {ok, MimeTypes} | {error, Reason}
+%%
+load_mime_types(MimeTypesFile) ->
+ case file:open(MimeTypesFile, [read]) of
+ {ok, Stream} ->
+ parse_mime_types(Stream, []);
+ {error, _} ->
+ {error, ?NICE("Can't open " ++ MimeTypesFile)}
+ end.
+
+parse_mime_types(Stream,MimeTypesList) ->
+ Line=
+ case io:get_line(Stream,'') of
+ eof ->
+ eof;
+ String ->
+ clean(String)
+ end,
+ parse_mime_types(Stream, MimeTypesList, Line).
+
+parse_mime_types(Stream, MimeTypesList, eof) ->
+ file:close(Stream),
+ {ok, MimeTypesList};
+parse_mime_types(Stream, MimeTypesList, "") ->
+ parse_mime_types(Stream, MimeTypesList);
+parse_mime_types(Stream, MimeTypesList, [$#|_]) ->
+ parse_mime_types(Stream, MimeTypesList);
+parse_mime_types(Stream, MimeTypesList, Line) ->
+ case regexp:split(Line, " ") of
+ {ok, [NewMimeType|Suffixes]} ->
+ parse_mime_types(Stream,lists:append(suffixes(NewMimeType,Suffixes),
+ MimeTypesList));
+ {ok, _} ->
+ {error, ?NICE(Line)}
+ end.
+
+suffixes(MimeType,[]) ->
+ [];
+suffixes(MimeType,[Suffix|Rest]) ->
+ [{Suffix,MimeType}|suffixes(MimeType,Rest)].
+
+%%
+%% Phase 2: Store
+%%
+
+%% store
+
+store(ConfigList) ->
+ Modules = httpd_util:key1search(ConfigList, modules, []),
+ Port = httpd_util:key1search(ConfigList, port),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ Name = httpd_util:make_name("httpd_conf",Addr,Port),
+ ?CDEBUG("store -> Name = ~p",[Name]),
+ ConfigDB = ets:new(Name, [named_table, bag, protected]),
+ ?CDEBUG("store -> ConfigDB = ~p",[ConfigDB]),
+ store(ConfigDB, ConfigList, lists:append(Modules,[?MODULE]),ConfigList).
+
+store(ConfigDB, ConfigList, Modules,[]) ->
+ ?vtrace("store -> done",[]),
+ ?CDEBUG("store -> done",[]),
+ {ok, ConfigDB};
+store(ConfigDB, ConfigList, Modules, [ConfigListEntry|Rest]) ->
+ ?vtrace("store -> entry with"
+ "~n ConfigListEntry: ~p",[ConfigListEntry]),
+ ?CDEBUG("store -> "
+ "~n ConfigListEntry: ~p",[ConfigListEntry]),
+ case store_traverse(ConfigListEntry,ConfigList,Modules) of
+ {ok, ConfigDBEntry} when tuple(ConfigDBEntry) ->
+ ?vtrace("store -> ConfigDBEntry(tuple): "
+ "~n ~p",[ConfigDBEntry]),
+ ?CDEBUG("store -> ConfigDBEntry(tuple): "
+ "~n ~p",[ConfigDBEntry]),
+ ets:insert(ConfigDB,ConfigDBEntry),
+ store(ConfigDB,ConfigList,Modules,Rest);
+ {ok, ConfigDBEntry} when list(ConfigDBEntry) ->
+ ?vtrace("store -> ConfigDBEntry(list): "
+ "~n ~p",[ConfigDBEntry]),
+ ?CDEBUG("store -> ConfigDBEntry(list): "
+ "~n ~p",[ConfigDBEntry]),
+ lists:foreach(fun(Entry) ->
+ ets:insert(ConfigDB,Entry)
+ end,ConfigDBEntry),
+ store(ConfigDB,ConfigList,Modules,Rest);
+ {error, Reason} ->
+ ?vlog("store -> error: ~p",[Reason]),
+ ?ERROR("store -> error: ~p",[Reason]),
+ {error,Reason}
+ end.
+
+store_traverse(ConfigListEntry,ConfigList,[]) ->
+ {error,?NICE("Unable to store configuration...")};
+store_traverse(ConfigListEntry, ConfigList, [Module|Rest]) ->
+ case is_exported(Module, {store, 2}) of
+ true ->
+ ?CDEBUG("store_traverse -> call ~p:store/2",[Module]),
+ case catch apply(Module,store,[ConfigListEntry, ConfigList]) of
+ {'EXIT',{function_clause,_}} ->
+ ?CDEBUG("store_traverse -> exit: function_clause",[]),
+ store_traverse(ConfigListEntry,ConfigList,Rest);
+ {'EXIT',Reason} ->
+ ?ERROR("store_traverse -> exit: ~p",[Reason]),
+ error_logger:error_report({'EXIT',Reason}),
+ store_traverse(ConfigListEntry,ConfigList,Rest);
+ Result ->
+ ?CDEBUG("store_traverse -> ~n"
+ " Result: ~p",[Result]),
+ Result
+ end;
+ false ->
+ store_traverse(ConfigListEntry,ConfigList,Rest)
+ end.
+
+store({mime_types,MimeTypesList},ConfigList) ->
+ Port = httpd_util:key1search(ConfigList, port),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ Name = httpd_util:make_name("httpd_mime",Addr,Port),
+ ?CDEBUG("store(mime_types) -> Name: ~p",[Name]),
+ {ok, MimeTypesDB} = store_mime_types(Name,MimeTypesList),
+ ?CDEBUG("store(mime_types) -> ~n"
+ " MimeTypesDB: ~p~n"
+ " MimeTypesDB info: ~p",
+ [MimeTypesDB,ets:info(MimeTypesDB)]),
+ {ok, {mime_types,MimeTypesDB}};
+store(ConfigListEntry,ConfigList) ->
+ ?CDEBUG("store/2 -> ~n"
+ " ConfigListEntry: ~p~n"
+ " ConfigList: ~p",
+ [ConfigListEntry,ConfigList]),
+ {ok, ConfigListEntry}.
+
+
+%% store_mime_types
+store_mime_types(Name,MimeTypesList) ->
+ ?CDEBUG("store_mime_types -> Name: ~p",[Name]),
+ MimeTypesDB = ets:new(Name, [set, protected]),
+ ?CDEBUG("store_mime_types -> MimeTypesDB: ~p",[MimeTypesDB]),
+ store_mime_types1(MimeTypesDB, MimeTypesList).
+
+store_mime_types1(MimeTypesDB,[]) ->
+ {ok, MimeTypesDB};
+store_mime_types1(MimeTypesDB,[Type|Rest]) ->
+ ?CDEBUG("store_mime_types1 -> Type: ~p",[Type]),
+ ets:insert(MimeTypesDB, Type),
+ store_mime_types1(MimeTypesDB, Rest).
+
+
+%%
+%% Phase 3: Remove
+%%
+
+remove_all(ConfigDB) ->
+ Modules = httpd_util:lookup(ConfigDB,modules,[]),
+ remove_traverse(ConfigDB, lists:append(Modules,[?MODULE])).
+
+remove_traverse(ConfigDB,[]) ->
+ ?vtrace("remove_traverse -> done", []),
+ ok;
+remove_traverse(ConfigDB,[Module|Rest]) ->
+ ?vtrace("remove_traverse -> call ~p:remove", [Module]),
+ case (catch apply(Module,remove,[ConfigDB])) of
+ {'EXIT',{undef,_}} ->
+ ?vtrace("remove_traverse -> undef", []),
+ remove_traverse(ConfigDB,Rest);
+ {'EXIT',{function_clause,_}} ->
+ ?vtrace("remove_traverse -> function_clause", []),
+ remove_traverse(ConfigDB,Rest);
+ {'EXIT',Reason} ->
+ ?vtrace("remove_traverse -> exit: ~p", [Reason]),
+ error_logger:error_report({'EXIT',Reason}),
+ remove_traverse(ConfigDB,Rest);
+ {error,Reason} ->
+ ?vtrace("remove_traverse -> error: ~p", [Reason]),
+ error_logger:error_report(Reason),
+ remove_traverse(ConfigDB,Rest);
+ _ ->
+ remove_traverse(ConfigDB,Rest)
+ end.
+
+remove(ConfigDB) ->
+ ets:delete(ConfigDB),
+ ok.
+
+
+%%
+%% Utility functions
+%%
+
+%% is_directory
+
+is_directory(Directory) ->
+ case file:read_file_info(Directory) of
+ {ok,FileInfo} ->
+ #file_info{type = Type, access = Access} = FileInfo,
+ is_directory(Type,Access,FileInfo,Directory);
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+is_directory(directory,read,_FileInfo,Directory) ->
+ {ok,Directory};
+is_directory(directory,read_write,_FileInfo,Directory) ->
+ {ok,Directory};
+is_directory(_Type,_Access,FileInfo,_Directory) ->
+ {error,FileInfo}.
+
+
+%% is_file
+
+is_file(File) ->
+ case file:read_file_info(File) of
+ {ok,FileInfo} ->
+ #file_info{type = Type, access = Access} = FileInfo,
+ is_file(Type,Access,FileInfo,File);
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+is_file(regular,read,_FileInfo,File) ->
+ {ok,File};
+is_file(regular,read_write,_FileInfo,File) ->
+ {ok,File};
+is_file(_Type,_Access,FileInfo,_File) ->
+ {error,FileInfo}.
+
+%% make_integer
+
+make_integer(String) ->
+ case regexp:match(clean(String),"[0-9]+") of
+ {match, _, _} ->
+ {ok, list_to_integer(clean(String))};
+ nomatch ->
+ {error, nomatch}
+ end.
+
+
+%% clean
+
+clean(String) ->
+ {ok,CleanedString,_} = regexp:gsub(String, "^[ \t\n\r\f]*|[ \t\n\r\f]*\$",""),
+ CleanedString.
+
+%% custom_clean
+
+custom_clean(String,MoreBefore,MoreAfter) ->
+ {ok,CleanedString,_}=regexp:gsub(String,"^[ \t\n\r\f"++MoreBefore++
+ "]*|[ \t\n\r\f"++MoreAfter++"]*\$",""),
+ CleanedString.
+
+%% check_enum
+
+check_enum(Enum,[]) ->
+ {error, not_valid};
+check_enum(Enum,[Enum|Rest]) ->
+ {ok, list_to_atom(Enum)};
+check_enum(Enum, [NotValid|Rest]) ->
+ check_enum(Enum, Rest).
+
+%% a_must
+
+a_must(ConfigList,[]) ->
+ ok;
+a_must(ConfigList,[Directive|Rest]) ->
+ case httpd_util:key1search(ConfigList,Directive) of
+ undefined ->
+ {missing,Directive};
+ _ ->
+ a_must(ConfigList,Rest)
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_example.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_example.erl
new file mode 100644
index 0000000000..1819650963
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_example.erl
@@ -0,0 +1,134 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_example.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_example).
+-export([print/1]).
+-export([get/2, post/2, yahoo/2, test1/2]).
+
+-export([newformat/3]).
+%% These are used by the inets test-suite
+-export([delay/1]).
+
+
+print(String) ->
+ [header(),
+ top("Print"),
+ String++"\n",
+ footer()].
+
+
+test1(Env, []) ->
+ io:format("Env:~p~n",[Env]),
+ ["<html>",
+ "<head>",
+ "<title>Test1</title>",
+ "</head>",
+ "<body>",
+ "<h1>Erlang Body</h1>",
+ "<h2>Stuff</h2>",
+ "</body>",
+ "</html>"].
+
+
+get(Env,[]) ->
+ [header(),
+ top("GET Example"),
+ "<FORM ACTION=\"/cgi-bin/erl/httpd_example:get\" METHOD=GET>
+<B>Input:</B> <INPUT TYPE=\"text\" NAME=\"input1\">
+<INPUT TYPE=\"text\" NAME=\"input2\">
+<INPUT TYPE=\"submit\"><BR>
+</FORM>" ++ "\n",
+ footer()];
+
+get(Env,Input) ->
+ default(Env,Input).
+
+post(Env,[]) ->
+ [header(),
+ top("POST Example"),
+ "<FORM ACTION=\"/cgi-bin/erl/httpd_example:post\" METHOD=POST>
+<B>Input:</B> <INPUT TYPE=\"text\" NAME=\"input1\">
+<INPUT TYPE=\"text\" NAME=\"input2\">
+<INPUT TYPE=\"submit\"><BR>
+</FORM>" ++ "\n",
+ footer()];
+
+post(Env,Input) ->
+ default(Env,Input).
+
+yahoo(Env,Input) ->
+ "Location: http://www.yahoo.com\r\n\r\n".
+
+default(Env,Input) ->
+ [header(),
+ top("Default Example"),
+ "<B>Environment:</B> ",io_lib:format("~p",[Env]),"<BR>\n",
+ "<B>Input:</B> ",Input,"<BR>\n",
+ "<B>Parsed Input:</B> ",
+ io_lib:format("~p",[httpd:parse_query(Input)]),"\n",
+ footer()].
+
+header() ->
+ header("text/html").
+header(MimeType) ->
+ "Content-type: " ++ MimeType ++ "\r\n\r\n".
+
+top(Title) ->
+ "<HTML>
+<HEAD>
+<TITLE>" ++ Title ++ "</TITLE>
+</HEAD>
+<BODY>\n".
+
+footer() ->
+ "</BODY>
+</HTML>\n".
+
+
+newformat(SessionID,Env,Input)->
+ mod_esi:deliver(SessionID,"Content-Type:text/html\r\n\r\n"),
+ mod_esi:deliver(SessionID,top("new esi format test")),
+ mod_esi:deliver(SessionID,"This new format is nice<BR>"),
+ mod_esi:deliver(SessionID,"This new format is nice<BR>"),
+ mod_esi:deliver(SessionID,"This new format is nice<BR>"),
+ mod_esi:deliver(SessionID,footer()).
+
+%% ------------------------------------------------------
+
+delay(Time) when integer(Time) ->
+ i("httpd_example:delay(~p) -> do the delay",[Time]),
+ sleep(Time),
+ i("httpd_example:delay(~p) -> done, now reply",[Time]),
+ delay_reply("delay ok");
+delay(Time) when list(Time) ->
+ delay(httpd_conf:make_integer(Time));
+delay({ok,Time}) when integer(Time) ->
+ delay(Time);
+delay({error,_Reason}) ->
+ i("delay -> called with invalid time"),
+ delay_reply("delay failed: invalid delay time").
+
+delay_reply(Reply) ->
+ [header(),
+ top("delay"),
+ Reply,
+ footer()].
+
+i(F) -> i(F,[]).
+i(F,A) -> io:format(F ++ "~n",A).
+
+sleep(T) -> receive after T -> ok end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_manager.erl
new file mode 100644
index 0000000000..78750c32c9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_manager.erl
@@ -0,0 +1,1030 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_manager.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+
+-module(httpd_manager).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+-behaviour(gen_server).
+
+%% External API
+-export([start/2, start/3, start_link/2, start_link/3, stop/1, restart/1]).
+
+%% Internal API
+-export([new_connection/1, done_connection/1]).
+
+%% Module API
+-export([config_lookup/2, config_lookup/3,
+ config_multi_lookup/2, config_multi_lookup/3,
+ config_match/2, config_match/3]).
+
+%% gen_server exports
+-export([init/1,
+ handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+
+%% Management exports
+-export([block/2, block/3, unblock/1]).
+-export([get_admin_state/1, get_usage_state/1]).
+-export([is_busy/1,is_busy/2,is_busy_or_blocked/1,is_blocked/1]). %% ???????
+-export([get_status/1, get_status/2]).
+-export([verbosity/2, verbosity/3]).
+
+
+-export([c/1]).
+
+-record(state,{socket_type = ip_comm,
+ config_file,
+ config_db = null,
+ connections, %% Current request handlers
+ admin_state = unblocked,
+ blocker_ref = undefined,
+ blocking_tmr = undefined,
+ status = []}).
+
+
+c(Port) ->
+ Ref = httpd_util:make_name("httpd",undefined,Port),
+ gen_server:call(Ref, fake_close).
+
+
+%%
+%% External API
+%%
+
+start(ConfigFile, ConfigList) ->
+ start(ConfigFile, ConfigList, []).
+
+start(ConfigFile, ConfigList, Verbosity) ->
+ Port = httpd_util:key1search(ConfigList,port,80),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ Name = make_name(Addr,Port),
+ ?LOG("start -> Name = ~p",[Name]),
+ gen_server:start({local,Name},?MODULE,
+ [ConfigFile, ConfigList, Addr, Port, Verbosity],[]).
+
+start_link(ConfigFile, ConfigList) ->
+ start_link(ConfigFile, ConfigList, []).
+
+start_link(ConfigFile, ConfigList, Verbosity) ->
+ Port = httpd_util:key1search(ConfigList,port,80),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ Name = make_name(Addr,Port),
+ ?LOG("start_link -> Name = ~p",[Name]),
+ gen_server:start_link({local, Name},?MODULE,
+ [ConfigFile, ConfigList, Addr, Port, Verbosity],[]).
+
+%% stop
+
+stop(ServerRef) ->
+ gen_server:call(ServerRef, stop).
+
+%% restart
+
+restart(ServerRef) ->
+ gen_server:call(ServerRef, restart).
+
+
+%%%----------------------------------------------------------------
+
+block(ServerRef, disturbing) ->
+ call(ServerRef,block);
+
+block(ServerRef, non_disturbing) ->
+ do_block(ServerRef, non_disturbing, infinity).
+
+block(ServerRef, Method, Timeout) ->
+ do_block(ServerRef, Method, Timeout).
+
+
+%% The reason for not using call here is that the manager cannot
+%% _wait_ for completion of the requests. It must be able to do
+%% other things at the same time as the blocking goes on.
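+%% (Editorial note: concretely, a {block, Method, Timeout, self(), Ref}
+%% message is cast to the manager and the caller then waits for the
+%% matching {block_reply, Reply, Ref}, leaving the manager free to keep
+%% serving its other callers while the blocking is in progress.)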
+do_block(ServerRef, Method, infinity) ->
+ Ref = make_ref(),
+ cast(ServerRef, {block, Method, infinity, self(), Ref}),
+ receive
+ {block_reply, Reply, Ref} ->
+ Reply
+ end;
+do_block(ServerRef,Method,Timeout) when Timeout > 0 ->
+ Ref = make_ref(),
+ cast(ServerRef,{block,Method,Timeout,self(),Ref}),
+ receive
+ {block_reply,Reply,Ref} ->
+ Reply
+ end.
+
+
+%%%----------------------------------------------------------------
+
+%% unblock
+
+unblock(ServerRef) ->
+ call(ServerRef,unblock).
+
+%% get admin/usage state
+
+get_admin_state(ServerRef) ->
+ call(ServerRef,get_admin_state).
+
+get_usage_state(ServerRef) ->
+ call(ServerRef,get_usage_state).
+
+
+%% get_status
+
+get_status(ServerRef) ->
+ gen_server:call(ServerRef,get_status).
+
+get_status(ServerRef,Timeout) ->
+ gen_server:call(ServerRef,get_status,Timeout).
+
+
+verbosity(ServerRef,Verbosity) ->
+ verbosity(ServerRef,all,Verbosity).
+
+verbosity(ServerRef,all,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,all,Verbosity});
+verbosity(ServerRef,manager,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,manager,Verbosity});
+verbosity(ServerRef,request,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,request,Verbosity});
+verbosity(ServerRef,acceptor,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,acceptor,Verbosity});
+verbosity(ServerRef,security,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,security,Verbosity});
+verbosity(ServerRef,auth,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,auth,Verbosity}).
+
+%%
+%% Internal API
+%%
+
+
+%% new_connection
+
+new_connection(Manager) ->
+ gen_server:call(Manager, {new_connection, self()}).
+
+%% done
+
+done_connection(Manager) ->
+ gen_server:cast(Manager, {done_connection, self()}).
+
+
+%% is_busy(ServerRef) -> true | false
+%%
+%% Tests if the server is (in usage state) busy,
+%% i.e. has reached the heavy load limit.
+%%
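+%% Illustrative use (editorial sketch; reject_request/0 and accept_request/0
+%% are invented for the example):
+%%
+%%   case httpd_manager:is_busy(ServerRef) of
+%%       true  -> reject_request();
+%%       false -> accept_request()
+%%   end.
+%%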
+
+is_busy(ServerRef) ->
+ gen_server:call(ServerRef,is_busy).
+
+is_busy(ServerRef,Timeout) ->
+ gen_server:call(ServerRef,is_busy,Timeout).
+
+
+%% is_busy_or_blocked(ServerRef) -> busy | blocked | false
+%%
+%% Tests if the server is busy (usage state), i.e. has reached
+%% the heavy load limit, or blocked (admin state).
+%%
+
+is_busy_or_blocked(ServerRef) ->
+ gen_server:call(ServerRef,is_busy_or_blocked).
+
+
+%% is_blocked(ServerRef) -> true | false
+%%
+%% Tests if the server is blocked (admin state).
+%%
+
+is_blocked(ServerRef) ->
+ gen_server:call(ServerRef,is_blocked).
+
+
+%%
+%% Module API. These functions are intended for use from modules only.
+%%
+
+config_lookup(Port, Query) ->
+ config_lookup(undefined, Port, Query).
+config_lookup(Addr, Port, Query) ->
+ Name = httpd_util:make_name("httpd",Addr,Port),
+ gen_server:call(whereis(Name), {config_lookup, Query}).
+
+config_multi_lookup(Port, Query) ->
+ config_multi_lookup(undefined,Port,Query).
+config_multi_lookup(Addr,Port, Query) ->
+ Name = httpd_util:make_name("httpd",Addr,Port),
+ gen_server:call(whereis(Name), {config_multi_lookup, Query}).
+
+config_match(Port, Pattern) ->
+ config_match(undefined,Port,Pattern).
+config_match(Addr, Port, Pattern) ->
+ Name = httpd_util:make_name("httpd",Addr,Port),
+ gen_server:call(whereis(Name), {config_match, Pattern}).
+
+
+%%
+%% Server call-back functions
+%%
+
+%% init
+
+init([ConfigFile, ConfigList, Addr, Port, Verbosity]) ->
+ process_flag(trap_exit, true),
+ case (catch do_init(ConfigFile, ConfigList, Addr, Port, Verbosity)) of
+ {error, Reason} ->
+ ?vlog("failed starting server: ~p", [Reason]),
+ {stop, Reason};
+ {ok, State} ->
+ {ok, State}
+ end.
+
+
+do_init(ConfigFile, ConfigList, Addr, Port, Verbosity) ->
+ put(sname,man),
+ set_verbosity(Verbosity),
+ ?vlog("starting",[]),
+ ConfigDB = do_initial_store(ConfigList),
+ ?vtrace("config db: ~p", [ConfigDB]),
+ SocketType = httpd_socket:config(ConfigDB),
+ ?vtrace("socket type: ~p, now start acceptor", [SocketType]),
+ case httpd_acceptor_sup:start_acceptor(SocketType, Addr, Port, ConfigDB) of
+ {ok, Pid} ->
+ ?vtrace("acceptor started: ~p", [Pid]),
+ Status = [{max_conn,0}, {last_heavy_load,never},
+ {last_connection,never}],
+ State = #state{socket_type = SocketType,
+ config_file = ConfigFile,
+ config_db = ConfigDB,
+ connections = [],
+ status = Status},
+ ?vdebug("started",[]),
+ {ok, State};
+ Else ->
+ Else
+ end.
+
+
+do_initial_store(ConfigList) ->
+ case httpd_conf:store(ConfigList) of
+ {ok, ConfigDB} ->
+ ConfigDB;
+ {error, Reason} ->
+ ?vinfo("failed storing configuration: ~p",[Reason]),
+ throw({error, Reason})
+ end.
+
+
+
+%% handle_call
+
+handle_call(stop, _From, State) ->
+ ?vlog("stop",[]),
+ {stop, normal, ok, State};
+
+handle_call({config_lookup, Query}, _From, State) ->
+ ?vlog("config lookup: Query = ~p",[Query]),
+ Res = httpd_util:lookup(State#state.config_db, Query),
+ ?vdebug("config lookup result: ~p",[Res]),
+ {reply, Res, State};
+
+handle_call({config_multi_lookup, Query}, _From, State) ->
+ ?vlog("multi config lookup: Query = ~p",[Query]),
+ Res = httpd_util:multi_lookup(State#state.config_db, Query),
+ ?vdebug("multi config lookup result: ~p",[Res]),
+ {reply, Res, State};
+
+handle_call({config_match, Query}, _From, State) ->
+ ?vlog("config match: Query = ~p",[Query]),
+ Res = ets:match_object(State#state.config_db, Query),
+ ?vdebug("config match result: ~p",[Res]),
+ {reply, Res, State};
+
+handle_call(get_status, _From, State) ->
+ ?vdebug("get status",[]),
+ ManagerStatus = manager_status(self()),
+ %% AuthStatus = auth_status(get(auth_server)),
+ %% SecStatus = sec_status(get(sec_server)),
+ %% AccStatus = sec_status(get(acceptor_server)),
+ S1 = [{current_conn,length(State#state.connections)}|State#state.status]++
+ [ManagerStatus],
+ ?vtrace("status = ~p",[S1]),
+ {reply,S1,State};
+
+handle_call(is_busy, From, State) ->
+ Reply = case get_ustate(State) of
+ busy ->
+ true;
+ _ ->
+ false
+ end,
+ ?vlog("is busy: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(is_busy_or_blocked, From, State) ->
+ Reply =
+ case get_astate(State) of
+ unblocked ->
+ case get_ustate(State) of
+ busy ->
+ busy;
+ _ ->
+ false
+ end;
+ _ ->
+ blocked
+ end,
+ ?vlog("is busy or blocked: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(is_blocked, From, State) ->
+ Reply =
+ case get_astate(State) of
+ unblocked ->
+ false;
+ _ ->
+ true
+ end,
+ ?vlog("is blocked: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(get_admin_state, From, State) ->
+ Reply = get_astate(State),
+ ?vlog("admin state: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(get_usage_state, From, State) ->
+ Reply = get_ustate(State),
+ ?vlog("usage state: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call({verbosity,Who,Verbosity}, From, State) ->
+ V = ?vvalidate(Verbosity),
+ ?vlog("~n Set new verbosity to ~p for ~p",[V,Who]),
+ Reply = set_verbosity(Who,V,State),
+ {reply,Reply,State};
+
+handle_call(restart, From, State) when State#state.admin_state == blocked ->
+ ?vlog("restart",[]),
+ case handle_restart(State) of
+ {stop, Reply,S1} ->
+ {stop, Reply, S1};
+ {_, Reply, S1} ->
+ {reply,Reply,S1}
+ end;
+
+handle_call(restart, From, State) ->
+ ?vlog("restart(~p)",[State#state.admin_state]),
+ {reply,{error,{invalid_admin_state,State#state.admin_state}},State};
+
+handle_call(block, From, State) ->
+ ?vlog("block(disturbing)",[]),
+ {Reply,S1} = handle_block(State),
+ {reply,Reply,S1};
+
+handle_call(unblock, {From,_Tag}, State) ->
+ ?vlog("unblock",[]),
+ {Reply,S1} = handle_unblock(State,From),
+ {reply, Reply, S1};
+
+handle_call({new_connection, Pid}, From, State) ->
+ ?vlog("~n New connection (~p) when connection count = ~p",
+ [Pid,length(State#state.connections)]),
+ {S, S1} = handle_new_connection(State, Pid),
+ Reply = {S, get(request_handler_verbosity)},
+ {reply, Reply, S1};
+
+handle_call(Request, From, State) ->
+ ?vinfo("~n unknown request '~p' from ~p", [Request,From]),
+ String =
+ lists:flatten(
+ io_lib:format("Unknown request "
+ "~n ~p"
+ "~nto manager (~p)"
+ "~nfrom ~p",
+ [Request, self(), From])),
+ report_error(State,String),
+ {reply, ok, State}.
+
+
+%% handle_cast
+
+handle_cast({done_connection, Pid}, State) ->
+ ?vlog("~n Done connection (~p)", [Pid]),
+ S1 = handle_done_connection(State, Pid),
+ {noreply, S1};
+
+handle_cast({block, disturbing, Timeout, From, Ref}, State) ->
+ ?vlog("block(disturbing,~p)",[Timeout]),
+ S1 = handle_block(State, Timeout, From, Ref),
+ {noreply,S1};
+
+handle_cast({block, non_disturbing, Timeout, From, Ref}, State) ->
+ ?vlog("block(non-disturbing,~p)",[Timeout]),
+ S1 = handle_nd_block(State, Timeout, From, Ref),
+ {noreply,S1};
+
+handle_cast(Message, State) ->
+ ?vinfo("~n received unknown message '~p'",[Message]),
+ String =
+ lists:flatten(
+ io_lib:format("Unknown message "
+ "~n ~p"
+ "~nto manager (~p)",
+ [Message, self()])),
+ report_error(State, String),
+ {noreply, State}.
+
+%% handle_info
+
+handle_info({block_timeout, Method}, State) ->
+ ?vlog("received block_timeout event",[]),
+ S1 = handle_block_timeout(State,Method),
+ {noreply, S1};
+
+handle_info({'DOWN', Ref, process, _Object, Info}, State) ->
+ ?vlog("~n down message for ~p",[Ref]),
+ S1 =
+ case State#state.blocker_ref of
+ Ref ->
+ handle_blocker_exit(State);
+ _ ->
+ %% Not our blocker, so ignore
+ State
+ end,
+ {noreply, S1};
+
+handle_info({'EXIT', Pid, normal}, State) ->
+ ?vdebug("~n Normal exit message from ~p", [Pid]),
+ {noreply, State};
+
+handle_info({'EXIT', Pid, blocked}, S) ->
+ ?vdebug("blocked exit signal from request handler (~p)", [Pid]),
+ {noreply, S};
+
+handle_info({'EXIT', Pid, Reason}, State) ->
+ ?vlog("~n Exit message from ~p for reason ~p",[Pid, Reason]),
+ S1 = check_connections(State, Pid, Reason),
+ {noreply, S1};
+
+handle_info(Info, State) ->
+ ?vinfo("~n received unknown info '~p'",[Info]),
+ String =
+ lists:flatten(
+ io_lib:format("Unknown info "
+ "~n ~p"
+ "~nto manager (~p)",
+ [Info, self()])),
+ report_error(State, String),
+ {noreply, State}.
+
+
+%% terminate
+
+terminate(R, #state{config_db = Db}) ->
+ ?vlog("Terminating for reason: ~n ~p", [R]),
+ httpd_conf:remove_all(Db),
+ ok.
+
+
+%% code_change({down,ToVsn}, State, Extra)
+%%
+%% NOTE:
+%% Upgrades from 2.5.1 to 2.5.3 and downgrades from 2.5.3 to 2.5.1
+%% are actually done with an application restart, so these functions
+%% are never used. They are kept only for possible future use.
+%%
+code_change({down,ToVsn},State,Extra) ->
+ {ok,State};
+
+%% code_change(FromVsn, State, Extra)
+%%
+code_change(FromVsn,State,Extra) ->
+ {ok,State}.
+
+
+
+%% -------------------------------------------------------------------------
+%% check_connections
+%%
+%%
+%%
+%%
+
+check_connections(#state{connections = []} = State, _Pid, _Reason) ->
+ State;
+check_connections(#state{admin_state = shutting_down,
+ connections = Connections} = State, Pid, Reason) ->
+ %% Could be a crashing request handler
+ case lists:delete(Pid, Connections) of
+ [] -> % Crashing request handler => block complete
+ String =
+ lists:flatten(
+ io_lib:format("request handler (~p) crashed:"
+ "~n ~p", [Pid, Reason])),
+ report_error(State, String),
+ ?vlog("block complete",[]),
+ demonitor_blocker(State#state.blocker_ref),
+ {Tmr,From,Ref} = State#state.blocking_tmr,
+ ?vlog("(possibly) stop block timer",[]),
+ stop_block_tmr(Tmr),
+ ?vlog("and send the reply",[]),
+ From ! {block_reply,ok,Ref},
+ State#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined};
+ Connections1 ->
+ State#state{connections = Connections1}
+ end;
+check_connections(#state{connections = Connections} = State, Pid, Reason) ->
+ case lists:delete(Pid, Connections) of
+ Connections -> % Not a request handler, so ignore
+ State;
+ Connections1 ->
+ String =
+ lists:flatten(
+ io_lib:format("request handler (~p) crashed:"
+ "~n ~p", [Pid, Reason])),
+ report_error(State, String),
+ State#state{connections = lists:delete(Pid, Connections)}
+ end.
+
+
+%% -------------------------------------------------------------------------
+%% handle_[new | done]_connection
+%%
+%%
+%%
+%%
+
+handle_new_connection(State, Handler) ->
+ UsageState = get_ustate(State),
+ AdminState = get_astate(State),
+ handle_new_connection(UsageState, AdminState, State, Handler).
+
+handle_new_connection(busy, unblocked, State, Handler) ->
+ Status = update_heavy_load_status(State#state.status),
+ {{reject, busy},
+ State#state{status = Status}};
+
+handle_new_connection(_UsageState, unblocked, State, Handler) ->
+ Connections = State#state.connections,
+ Status = update_connection_status(State#state.status,
+ length(Connections)+1),
+ link(Handler),
+ {accept,
+ State#state{connections = [Handler|Connections], status = Status}};
+
+handle_new_connection(_UsageState, _AdminState, State, _Handler) ->
+ {{reject, blocked},
+ State}.
+
+
+handle_done_connection(#state{admin_state = shutting_down,
+ connections = Connections} = State, Handler) ->
+ unlink(Handler),
+ case lists:delete(Handler, Connections) of
+ [] -> % Ok, block complete
+ ?vlog("block complete",[]),
+ demonitor_blocker(State#state.blocker_ref),
+ {Tmr,From,Ref} = State#state.blocking_tmr,
+ ?vlog("(possibly) stop block timer",[]),
+ stop_block_tmr(Tmr),
+ ?vlog("and send the reply",[]),
+ From ! {block_reply,ok,Ref},
+ State#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined};
+ Connections1 ->
+ State#state{connections = Connections1}
+ end;
+
+handle_done_connection(#state{connections = Connections} = State, Handler) ->
+ State#state{connections = lists:delete(Handler, Connections)}.
+
+
+%% -------------------------------------------------------------------------
+%% handle_block
+%%
+%%
+%%
+%%
+handle_block(#state{admin_state = AdminState} = S) ->
+ handle_block(S, AdminState).
+
+handle_block(S,unblocked) ->
+ %% Kill all connections
+ ?vtrace("handle_block(unblocked) -> kill all request handlers",[]),
+%% [exit(Pid,blocked) || Pid <- S#state.connections],
+ [kill_handler(Pid) || Pid <- S#state.connections],
+ {ok,S#state{connections = [], admin_state = blocked}};
+handle_block(S,blocked) ->
+ ?vtrace("handle_block(blocked) -> already blocked",[]),
+ {ok,S};
+handle_block(S,shutting_down) ->
+ ?vtrace("handle_block(shutting_down) -> ongoing...",[]),
+ {{error,shutting_down},S}.
+
+
+kill_handler(Pid) ->
+ ?vtrace("kill request handler: ~p",[Pid]),
+ exit(Pid, blocked).
+%% exit(Pid, kill).
+
+handle_block(S,Timeout,From,Ref) when Timeout >= 0 ->
+ do_block(S,Timeout,From,Ref);
+
+handle_block(S,Timeout,From,Ref) ->
+ Reply = {error,{invalid_block_request,Timeout}},
+ From ! {block_reply,Reply,Ref},
+ S.
+
+do_block(S,Timeout,From,Ref) ->
+ case S#state.connections of
+ [] ->
+ %% Already in idle usage state => go directly to blocked
+ ?vdebug("do_block -> already in idle usage state",[]),
+ From ! {block_reply,ok,Ref},
+ S#state{admin_state = blocked};
+ _ ->
+ %% Active or Busy usage state => go to shutting_down
+ ?vdebug("do_block -> active or busy usage state",[]),
+ %% Make sure we get to know if blocker dies...
+ ?vtrace("do_block -> create blocker monitor",[]),
+ MonitorRef = monitor_blocker(From),
+ ?vtrace("do_block -> (possibly) start block timer",[]),
+ Tmr = {start_block_tmr(Timeout,disturbing),From,Ref},
+ S#state{admin_state = shutting_down,
+ blocker_ref = MonitorRef, blocking_tmr = Tmr}
+ end.
+
+handle_nd_block(S,infinity,From,Ref) ->
+ do_nd_block(S,infinity,From,Ref);
+
+handle_nd_block(S,Timeout,From,Ref) when Timeout >= 0 ->
+ do_nd_block(S,Timeout,From,Ref);
+
+handle_nd_block(S,Timeout,From,Ref) ->
+ Reply = {error,{invalid_block_request,Timeout}},
+ From ! {block_reply,Reply,Ref},
+ S.
+
+do_nd_block(S,Timeout,From,Ref) ->
+ case S#state.connections of
+ [] ->
+ %% Already in idle usage state => go directly to blocked
+ ?vdebug("do_nd_block -> already in idle usage state",[]),
+ From ! {block_reply,ok,Ref},
+ S#state{admin_state = blocked};
+ _ ->
+ %% Active or Busy usage state => go to shutting_down
+ ?vdebug("do_nd_block -> active or busy usage state",[]),
+ %% Make sure we get to know if blocker dies...
+ ?vtrace("do_nd_block -> create blocker monitor",[]),
+ MonitorRef = monitor_blocker(From),
+ ?vtrace("do_nd_block -> (possibly) start block timer",[]),
+ Tmr = {start_block_tmr(Timeout,non_disturbing),From,Ref},
+ S#state{admin_state = shutting_down,
+ blocker_ref = MonitorRef, blocking_tmr = Tmr}
+ end.
+
+handle_block_timeout(S,Method) ->
+ %% Time to take this to the road...
+ demonitor_blocker(S#state.blocker_ref),
+ handle_block_timeout1(S,Method,S#state.blocking_tmr).
+
+handle_block_timeout1(S,non_disturbing,{_,From,Ref}) ->
+ ?vdebug("handle_block_timeout1(non-disturbing) -> send reply: timeout",[]),
+ From ! {block_reply,{error,timeout},Ref},
+ S#state{admin_state = unblocked,
+ blocker_ref = undefined, blocking_tmr = undefined};
+
+handle_block_timeout1(S,disturbing,{_,From,Ref}) ->
+ ?vdebug("handle_block_timeout1(disturbing) -> kill all connections",[]),
+ [exit(Pid,blocked) || Pid <- S#state.connections],
+
+ ?vdebug("handle_block_timeout1 -> send reply: ok",[]),
+ From ! {block_reply,ok,Ref},
+ S#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined, blocking_tmr = undefined};
+
+handle_block_timeout1(S,Method,{_,From,Ref}) ->
+ ?vinfo("received block timeout with unknown block method:"
+ "~n Method: ~p",[Method]),
+ From ! {block_reply,{error,{unknown_block_method,Method}},Ref},
+ S#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined, blocking_tmr = undefined};
+
+handle_block_timeout1(S,Method,TmrInfo) ->
+ ?vinfo("received block timeout with erroneous timer info:"
+ "~n Method: ~p"
+ "~n TmrInfo: ~p",[Method,TmrInfo]),
+ S#state{admin_state = unblocked,
+ blocker_ref = undefined, blocking_tmr = undefined}.
+
+handle_unblock(S,FromA) ->
+ handle_unblock(S,FromA,S#state.admin_state).
+
+handle_unblock(S,_FromA,unblocked) ->
+ {ok,S};
+handle_unblock(S,FromA,_AdminState) ->
+ ?vtrace("handle_unblock -> (possibly) stop block timer",[]),
+ stop_block_tmr(S#state.blocking_tmr),
+ case S#state.blocking_tmr of
+ {Tmr,FromB,Ref} ->
+ %% Another process is trying to unblock
+ %% Inform the blocker
+ FromB ! {block_reply, {error,{unblocked,FromA}},Ref};
+ _ ->
+ ok
+ end,
+ {ok,S#state{admin_state = unblocked, blocking_tmr = undefined}}.
+
+%% The blocker died so we give up on the block.
+handle_blocker_exit(S) ->
+ {Tmr,_From,_Ref} = S#state.blocking_tmr,
+ ?vtrace("handle_blocker_exit -> (possibly) stop block timer",[]),
+ stop_block_tmr(Tmr),
+ S#state{admin_state = unblocked,
+ blocker_ref = undefined, blocking_tmr = undefined}.
+
+
+
+%% -------------------------------------------------------------------------
+%% handle_restart
+%%
+%%
+%%
+%%
+handle_restart(#state{config_file = undefined} = State) ->
+ {continue, {error, undefined_config_file}, State};
+handle_restart(#state{config_db = Db, config_file = ConfigFile} = State) ->
+ ?vtrace("load new configuration",[]),
+ {ok, Config} = httpd_conf:load(ConfigFile),
+ ?vtrace("check for illegal changes (addr, port and socket-type)",[]),
+ case (catch check_constant_values(Db, Config)) of
+ ok ->
+	    %% If something goes wrong between the remove
+	    %% and the store, we are in trouble
+ ?vtrace("remove old configuration, now hold you breath...",[]),
+ httpd_conf:remove_all(Db),
+ ?vtrace("store new configuration",[]),
+ case httpd_conf:store(Config) of
+ {ok, NewConfigDB} ->
+ ?vlog("restart done, puh!",[]),
+ {continue, ok, State#state{config_db = NewConfigDB}};
+ Error ->
+ ?vlog("failed store new config: ~n ~p",[Error]),
+ {stop, Error, State}
+ end;
+ Error ->
+ ?vlog("restart NOT performed due to:"
+ "~n ~p",[Error]),
+ {continue, Error, State}
+ end.
+
+
+check_constant_values(Db, Config) ->
+ %% Check port number
+ ?vtrace("check_constant_values -> check port number",[]),
+ Port = httpd_util:lookup(Db,port),
+ case httpd_util:key1search(Config,port) of %% MUST be equal
+ Port ->
+ ok;
+ OtherPort ->
+ throw({error,{port_number_changed,Port,OtherPort}})
+ end,
+
+ %% Check bind address
+ ?vtrace("check_constant_values -> check bind address",[]),
+ Addr = httpd_util:lookup(Db,bind_address),
+ case httpd_util:key1search(Config,bind_address) of %% MUST be equal
+ Addr ->
+ ok;
+ OtherAddr ->
+ throw({error,{addr_changed,Addr,OtherAddr}})
+ end,
+
+ %% Check socket type
+ ?vtrace("check_constant_values -> check socket type",[]),
+ SockType = httpd_util:lookup(Db, com_type),
+ case httpd_util:key1search(Config, com_type) of %% MUST be equal
+ SockType ->
+ ok;
+ OtherSockType ->
+ throw({error,{sock_type_changed,SockType,OtherSockType}})
+ end,
+ ?vtrace("check_constant_values -> done",[]),
+ ok.
+
+
+%% get_ustate(State) -> idle | active | busy
+%%
+%% Retrieve the usage state of the HTTP server:
+%% 0 active connections -> idle
+%% max_clients active connections -> busy
+%% Otherwise -> active
+%%
+get_ustate(State) ->
+ get_ustate(length(State#state.connections),State).
+
+get_ustate(0,_State) ->
+ idle;
+get_ustate(ConnectionCnt,State) ->
+ ConfigDB = State#state.config_db,
+ case httpd_util:lookup(ConfigDB, max_clients, 150) of
+ ConnectionCnt ->
+ busy;
+ _ ->
+ active
+    end.
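+
+%% For example, with max_clients left at its default of 150:
+%%   get_ustate(0,   State) -> idle
+%%   get_ustate(150, State) -> busy
+%%   get_ustate(17,  State) -> active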
+
+
+get_astate(S) -> S#state.admin_state.
+
+
+%% Timer handling functions
+start_block_tmr(infinity,_) ->
+ undefined;
+start_block_tmr(T,M) ->
+ erlang:send_after(T,self(),{block_timeout,M}).
+
+stop_block_tmr(undefined) ->
+ ok;
+stop_block_tmr(Ref) ->
+ erlang:cancel_timer(Ref).
+
+
+%% Monitor blocker functions
+monitor_blocker(Pid) when pid(Pid) ->
+ case (catch erlang:monitor(process,Pid)) of
+ MonitorRef ->
+ MonitorRef;
+ {'EXIT',Reason} ->
+ undefined
+ end;
+monitor_blocker(_) ->
+ undefined.
+
+demonitor_blocker(undefined) ->
+ ok;
+demonitor_blocker(Ref) ->
+ (catch erlang:demonitor(Ref)).
+
+
+%% Some status utility functions
+
+update_heavy_load_status(Status) ->
+ update_status_with_time(Status,last_heavy_load).
+
+update_connection_status(Status,ConnCount) ->
+ S1 = case lists:keysearch(max_conn,1,Status) of
+ {value,{max_conn,C1}} when ConnCount > C1 ->
+ lists:keyreplace(max_conn,1,Status,{max_conn,ConnCount});
+ {value,{max_conn,C2}} ->
+ Status;
+ false ->
+ [{max_conn,ConnCount}|Status]
+ end,
+ update_status_with_time(S1,last_connection).
+
+update_status_with_time(Status,Key) ->
+ lists:keyreplace(Key,1,Status,{Key,universal_time()}).
+
+universal_time() -> calendar:universal_time().
+
+
+auth_status(P) when pid(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size, current_function],
+ {auth_status, process_status(P,Items,[])};
+auth_status(_) ->
+ {auth_status, undefined}.
+
+sec_status(P) when pid(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size, current_function],
+ {security_status, process_status(P,Items,[])};
+sec_status(_) ->
+ {security_status, undefined}.
+
+acceptor_status(P) when pid(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size, current_function],
+ {acceptor_status, process_status(P,Items,[])};
+acceptor_status(_) ->
+ {acceptor_status, undefined}.
+
+
+manager_status(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size],
+ {manager_status, process_status(P,Items,[])}.
+
+
+process_status(P,[],L) ->
+ [{pid,P}|lists:reverse(L)];
+process_status(P,[H|T],L) ->
+ case (catch process_info(P,H)) of
+ {H, Value} ->
+ process_status(P,T,[{H,Value}|L]);
+ _ ->
+ process_status(P,T,[{H,undefined}|L])
+ end.
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd",Addr,Port).
+
+
+report_error(State,String) ->
+ Cdb = State#state.config_db,
+ error_logger:error_report(String),
+ mod_log:report_error(Cdb,String),
+ mod_disk_log:report_error(Cdb,String).
+
+
+set_verbosity(V) ->
+ Units = [manager_verbosity,
+ acceptor_verbosity, request_handler_verbosity,
+ security_verbosity, auth_verbosity],
+ case httpd_util:key1search(V, all) of
+ undefined ->
+ set_verbosity(V, Units);
+ Verbosity when atom(Verbosity) ->
+ V1 = [{Unit, Verbosity} || Unit <- Units],
+ set_verbosity(V1, Units)
+ end.
+
+set_verbosity(_V, []) ->
+ ok;
+set_verbosity(V, [manager_verbosity = Unit|Units]) ->
+ Verbosity = httpd_util:key1search(V, Unit, ?default_verbosity),
+ put(verbosity, ?vvalidate(Verbosity)),
+ set_verbosity(V, Units);
+set_verbosity(V, [Unit|Units]) ->
+ Verbosity = httpd_util:key1search(V, Unit, ?default_verbosity),
+ put(Unit, ?vvalidate(Verbosity)),
+ set_verbosity(V, Units).
+
+
+set_verbosity(manager,V,_S) ->
+ put(verbosity,V);
+set_verbosity(acceptor,V,_S) ->
+ put(acceptor_verbosity,V);
+set_verbosity(request,V,_S) ->
+ put(request_handler_verbosity,V);
+set_verbosity(security,V,S) ->
+ OldVerbosity = put(security_verbosity,V),
+ Addr = httpd_util:lookup(S#state.config_db, bind_address),
+ Port = httpd_util:lookup(S#state.config_db, port),
+ mod_security_server:verbosity(Addr,Port,V),
+ OldVerbosity;
+set_verbosity(auth,V,S) ->
+ OldVerbosity = put(auth_verbosity,V),
+ Addr = httpd_util:lookup(S#state.config_db, bind_address),
+ Port = httpd_util:lookup(S#state.config_db, port),
+ mod_auth_server:verbosity(Addr,Port,V),
+ OldVerbosity;
+
+set_verbosity(all,V,S) ->
+ OldMv = put(verbosity,V),
+    OldAcV = put(acceptor_verbosity,V),
+    OldRv = put(request_handler_verbosity,V),
+    OldSv = put(security_verbosity,V),
+    OldAuV = put(auth_verbosity,V),
+    Addr = httpd_util:lookup(S#state.config_db, bind_address),
+    Port = httpd_util:lookup(S#state.config_db, port),
+    mod_security_server:verbosity(Addr,Port,V),
+    mod_auth_server:verbosity(Addr,Port,V),
+    [{manager,OldMv}, {acceptor,OldAcV}, {request,OldRv},
+     {security,OldSv}, {auth,OldAuV}].
+
+
+%%
+call(ServerRef,Request) ->
+ gen_server:call(ServerRef,Request).
+
+cast(ServerRef,Message) ->
+ gen_server:cast(ServerRef,Message).
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_misc_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_misc_sup.erl
new file mode 100644
index 0000000000..5921c5db60
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_misc_sup.erl
@@ -0,0 +1,116 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_misc_sup.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+%%----------------------------------------------------------------------
+%% Purpose: Supervisor for the httpd auxiliary (auth and security) processes
+%%----------------------------------------------------------------------
+
+-module(httpd_misc_sup).
+
+-behaviour(supervisor).
+
+-include("httpd_verbosity.hrl").
+
+%% public
+-export([start/3, stop/1, init/1]).
+
+-export([start_auth_server/3, stop_auth_server/2,
+ start_sec_server/3, stop_sec_server/2]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% supervisor callback functions
+
+
+start(Addr, Port, MiscSupVerbosity) ->
+ SupName = make_name(Addr, Port),
+ supervisor:start_link({local, SupName}, ?MODULE, [MiscSupVerbosity]).
+
+stop(StartArgs) ->
+ ok.
+
+init([Verbosity]) -> % Supervisor
+ do_init(Verbosity);
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+do_init(Verbosity) ->
+ put(verbosity,?vvalidate(Verbosity)),
+ put(sname,misc_sup),
+ ?vlog("starting", []),
+ Flags = {one_for_one, 0, 1},
+ KillAfter = timer:seconds(1),
+ Workers = [],
+ {ok, {Flags, Workers}}.
+
+
+%%----------------------------------------------------------------------
+%% Functions: start_[auth|sec]_server/3, stop_[auth|sec]_server/2
+%% Description: Starts or stops an [auth | security] worker (child) process
+%%----------------------------------------------------------------------
+
+start_auth_server(Addr, Port, Verbosity) ->
+ start_permanent_worker(mod_auth_server, Addr, Port,
+ Verbosity, [gen_server]).
+
+stop_auth_server(Addr, Port) ->
+ stop_permanent_worker(mod_auth_server, Addr, Port).
+
+
+start_sec_server(Addr, Port, Verbosity) ->
+ start_permanent_worker(mod_security_server, Addr, Port,
+ Verbosity, [gen_server]).
+
+stop_sec_server(Addr, Port) ->
+ stop_permanent_worker(mod_security_server, Addr, Port).
+
+
+
+%%----------------------------------------------------------------------
+%% Function: start_permanent_worker/5
+%% Description: Starts a permanent worker (child) process
+%%----------------------------------------------------------------------
+
+start_permanent_worker(Mod, Addr, Port, Verbosity, Modules) ->
+ SupName = make_name(Addr, Port),
+ Spec = {{Mod, Addr, Port},
+ {Mod, start_link, [Addr, Port, Verbosity]},
+ permanent, timer:seconds(1), worker, [Mod] ++ Modules},
+ supervisor:start_child(SupName, Spec).
+
+
+%%----------------------------------------------------------------------
+%% Function: stop_permanent_worker/3
+%% Description: Stops a permanent worker (child) process
+%%----------------------------------------------------------------------
+
+stop_permanent_worker(Mod, Addr, Port) ->
+ SupName = make_name(Addr, Port),
+ Name = {Mod, Addr, Port},
+ case supervisor:terminate_child(SupName, Name) of
+ ok ->
+ supervisor:delete_child(SupName, Name);
+ Error ->
+ Error
+ end.
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_misc_sup",Addr,Port).
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_parse.erl
new file mode 100644
index 0000000000..3f8f0837f9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_parse.erl
@@ -0,0 +1,348 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_parse.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_parse).
+-export([
+ request_header/1,
+ hsplit/2,
+ get_request_record/10,
+ split_lines/1,
+ tagup_header/1]).
+-include("httpd.hrl").
+
+
+%%----------------------------------------------------------------------
+%% request_header
+%%
+%% Input: The request as sent from the client (list of characters)
+%% (may include part of the entity body)
+%%
+%% Returns:
+%% {ok, Info#mod}
+%% {not_implemented,Info#mod}
+%% {bad_request, Reason}
+%%----------------------------------------------------------------------
+
+request_header(Header)->
+ [RequestLine|HeaderFields] = split_lines(Header),
+ ?DEBUG("request ->"
+ "~n RequestLine: ~p"
+ "~n Header: ~p",[RequestLine,Header]),
+ ParsedHeader = tagup_header(HeaderFields),
+ ?DEBUG("request ->"
+ "~n ParseHeader: ~p",[ParsedHeader]),
+ case verify_request(string:tokens(RequestLine," ")) of
+ ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+ {ok, ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ ["GET", RequestURI, "HTTP/0.9"] ->
+ {ok, ["GET", RequestURI, "HTTP/0.9", RequestLine, ParsedHeader]};
+ ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+ {ok, ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+ {ok, ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ %%HTTP must be 1.1 or higher
+ ["TRACE", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] when N>48->
+ {ok, ["TRACE", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ [Method, RequestURI] ->
+ {not_implemented, RequestLine, Method, RequestURI,ParsedHeader,"HTTP/0.9"};
+ [Method, RequestURI, HTTPVersion] ->
+ {not_implemented, RequestLine, Method, RequestURI,ParsedHeader, HTTPVersion};
+ {bad_request, Reason} ->
+ {bad_request, Reason};
+ Reason ->
+ {bad_request, "Unknown request method"}
+ end.
+
+
+
+
+
+
+%%----------------------------------------------------------------------
+%% The request is passed through the server as a record of type mod;
+%% build that record here
+%% ----------------------------------------------------------------------
+
+get_request_record(Socket,SocketType,ConfigDB,Method,RequestURI,
+ HTTPVersion,RequestLine,ParsedHeader,EntityBody,InitData)->
+ PersistentConn=get_persistens(HTTPVersion,ParsedHeader,ConfigDB),
+ Info=#mod{init_data=InitData,
+ data=[],
+ socket_type=SocketType,
+ socket=Socket,
+ config_db=ConfigDB,
+ method=Method,
+ absolute_uri=formatAbsoluteURI(RequestURI,ParsedHeader),
+ request_uri=formatRequestUri(RequestURI),
+ http_version=HTTPVersion,
+ request_line=RequestLine,
+ parsed_header=ParsedHeader,
+ entity_body=maybe_remove_nl(ParsedHeader,EntityBody),
+ connection=PersistentConn},
+ {ok,Info}.
+
+%%----------------------------------------------------------------------
+%% Control whether we shall maintain a persistent connection or not
+%%----------------------------------------------------------------------
+get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
+ case httpd_util:lookup(ConfigDB,persistent_conn,true) of
+ true->
+ case HTTPVersion of
+	    %% If the version is prior to 1.1, kill the connection
+ [$H, $T, $T, $P, $\/, $1, $.,N] ->
+ case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of
+		%% If the connection is not ordered to go down, let it live.
+		%% The keep-alive value is the older form; it may be sent by
+		%% older HTTP/1.1 clients.
+ "keep-alive" when N >= 49 ->
+ ?DEBUG("CONNECTION MODE: ~p",[true]),
+ true;
+ "close" ->
+ ?DEBUG("CONNECTION MODE: ~p",[false]),
+ false;
+ Connect ->
+ ?DEBUG("CONNECTION MODE: ~p VALUE: ~p",[false,Connect]),
+ false
+ end;
+ _ ->
+ ?DEBUG("CONNECTION MODE: ~p VERSION: ~p",[false,HTTPVersion]),
+ false
+
+ end;
+ _ ->
+ false
+ end.
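+
+%% For example, assuming persistent_conn is left at its default (true):
+%%   get_persistens("HTTP/1.1", [], ConfigDB)                       -> true
+%%   get_persistens("HTTP/1.1", [{"connection","close"}], ConfigDB) -> false
+%%   get_persistens("HTTP/1.0", [], ConfigDB)                       -> false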
+
+
+
+
+%%----------------------------------------------------------------------
+%% Control whether the last newline of the body is a part of the message
+%% or a part of the multipart message.
+%%----------------------------------------------------------------------
+maybe_remove_nl(Header,Rest) ->
+ case find_content_type(Header) of
+ false ->
+ {ok,EntityBody,_}=regexp:sub(Rest,"\r\n\$",""),
+ EntityBody;
+ {ok, Value} ->
+ case string:str(Value, "multipart/form-data") of
+ 0 ->
+ {ok,EntityBody,_}=regexp:sub(Rest,"\r\n\$",""),
+ EntityBody;
+ _ ->
+ Rest
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%% Get the content type of the incoming request
+%%----------------------------------------------------------------------
+
+
+find_content_type([]) ->
+ false;
+find_content_type([{Name,Value}|Tail]) ->
+ case httpd_util:to_lower(Name) of
+ "content-type" ->
+ {ok, Value};
+ _ ->
+ find_content_type(Tail)
+ end.
+
+%%----------------------------------------------------------------------
+%% Split the header into a list of strings where each string represents
+%% an HTTP header field
+%%----------------------------------------------------------------------
+split_lines(Request) ->
+ split_lines(Request, [], []).
+split_lines([], CAcc, Acc) ->
+ lists:reverse([lists:reverse(CAcc)|Acc]);
+
+%% White space in header fields is allowed, but a continuation line must
+%% begin with LWS, see RFC 2616 chapter 4.2.
+split_lines([$\r, $\n, $\t |Rest], CAcc, Acc) ->
+ split_lines(Rest, [$\r, $\n |CAcc], Acc);
+
+split_lines([$\r, $\n, $\s |Rest], CAcc, Acc) ->
+ split_lines(Rest, [$\r, $\n |CAcc], Acc);
+
+split_lines([$\r, $\n|Rest], CAcc, Acc) ->
+ split_lines(Rest, [], [lists:reverse(CAcc)|Acc]);
+split_lines([Chr|Rest], CAcc, Acc) ->
+ split_lines(Rest, [Chr|CAcc], Acc).
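+
+%% For example, a plain header is split at each CRLF, while a line that
+%% begins with LWS (space or tab) is folded into the preceding field:
+%%   split_lines("Host: x\r\nAccept: */*\r\n") -> ["Host: x", "Accept: */*", ""]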
+
+
+%%----------------------------------------------------------------------
+%% This is a 'hack' to stop people from trying to access directories/files
+%% relative to the ServerRoot.
+%%----------------------------------------------------------------------
+
+
+verify_request([Request, RequestURI]) ->
+ verify_request([Request, RequestURI, "HTTP/0.9"]);
+
+verify_request([Request, RequestURI, Protocol]) ->
+ NewRequestURI =
+ case string:str(RequestURI, "?") of
+ 0 ->
+ RequestURI;
+ Ndx ->
+ string:left(RequestURI, Ndx)
+ end,
+ case string:str(NewRequestURI, "..") of
+ 0 ->
+ [Request, RequestURI, Protocol];
+ _ ->
+ {bad_request, {forbidden, RequestURI}}
+ end;
+verify_request(Request) ->
+ Request.
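+
+%% For example, a URI that tries to climb out of the server root is refused:
+%%   verify_request(["GET", "/../etc/passwd", "HTTP/1.0"]) ->
+%%       {bad_request, {forbidden, "/../etc/passwd"}}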
+
+%%----------------------------------------------------------------------
+%% tagup_header
+%%
+%% Parses the header of an HTTP request and returns a {Key,Value} tuple
+%% list containing the Name and Value of each header directive, as in:
+%%
+%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
+%%
+%% But in HTTP/1.1 the field names are case insensitive, so now it must be
+%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
+%% The standard furthermore says that leading and trailing white space
+%% is not a part of the field value and shall therefore be removed.
+%%----------------------------------------------------------------------
+
+tagup_header([]) -> [];
+tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
+
+tag([], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), ""};
+tag([$:|Rest], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), httpd_util:strip(Rest)};
+tag([Chr|Rest], Tag) ->
+ tag(Rest, [Chr|Tag]).
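+
+%% For example (httpd_util:strip/1 removes the surrounding white space):
+%%   tagup_header(["Content-Type: text/html", "HOST:  localhost"]) ->
+%%       [{"content-type","text/html"}, {"host","localhost"}]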
+
+
+%%----------------------------------------------------------------------
+%% There are 3 possible forms of the request URI
+%%
+%% 1. "*"          The request is not for a specific asset; it is instead
+%%                 for the server itself
+%%
+%% 2. absoluteURI  The whole server name, port and asset are in the request
+%%
+%% 3. abs_path     The most common form, used by HTTP/1.0: a path to the
+%%                 requested asset
+%%----------------------------------------------------------------------
+formatRequestUri("*")->
+ "*";
+formatRequestUri([$h,$t,$t,$p,$:,$\/,$\/|ServerAndPath]) ->
+ removeServer(ServerAndPath);
+
+formatRequestUri([$H,$T,$T,$P,$:,$\/,$\/|ServerAndPath]) ->
+ removeServer(ServerAndPath);
+
+formatRequestUri(ABSPath) ->
+ ABSPath.
+
+removeServer([$\/|Url])->
+ case Url of
+ []->
+ "/";
+ _->
+ [$\/|Url]
+ end;
+removeServer([N|Url]) ->
+ removeServer(Url).
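+
+%% For example (illustrative values):
+%%   formatRequestUri("*")                        -> "*"
+%%   formatRequestUri("http://host:8080/a/b?q=1") -> "/a/b?q=1"
+%%   formatRequestUri("/a/b")                     -> "/a/b"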
+
+
+formatAbsoluteURI([$h,$t,$t,$p,$:,$\/,$\/|Uri],ParsedHeader)->
+ [$H,$T,$T,$P,$:,$\/,$\/|Uri];
+
+formatAbsoluteURI([$H,$T,$T,$P,$:,$\/,$\/|Uri],ParsedHeader)->
+ [$H,$T,$T,$P,$:,$\/,$\/|Uri];
+
+formatAbsoluteURI(Uri,ParsedHeader)->
+ case httpd_util:key1search(ParsedHeader,"host") of
+ undefined ->
+ nohost;
+ Host ->
+ Host++Uri
+ end.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Code below is crap from an older version and shall be removed when
+%% the transformation to http/1.1 is finished
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+
+%request(Request) ->
+% ?DEBUG("request -> entry with:"
+% "~n Request: ~s",[Request]),
+ % {BeforeEntityBody, Rest} = hsplit([], Request),
+ % ?DEBUG("request ->"
+% "~n BeforeEntityBody: ~p"
+% "~n Rest: ~p",[BeforeEntityBody, Rest]),
+% [RequestLine|Header] = split_lines(BeforeEntityBody),
+% ?DEBUG("request ->"
+% "~n RequestLine: ~p"
+% "~n Header: ~p",[RequestLine,Header]),
+% ParsedHeader = tagup_header(Header),
+% ?DEBUG("request ->"
+% "~n ParseHeader: ~p",[ParsedHeader]),
+% EntityBody = maybe_remove_nl(ParsedHeader,Rest),
+% ?DEBUG("request ->"
+% "~n EntityBody: ~p",[EntityBody]),
+% case verify_request(string:tokens(RequestLine," ")) of
+% ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+% {ok, ["HEAD", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+% ParsedHeader, EntityBody]};
+% ["GET", RequestURI, "HTTP/0.9"] ->
+% {ok, ["GET", RequestURI, "HTTP/0.9", RequestLine, ParsedHeader,
+% EntityBody]};
+% ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+% {ok, ["GET", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+% ParsedHeader,EntityBody]};
+%% ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+% {ok, ["POST", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+% ParsedHeader, EntityBody]};
+% [Method, RequestURI] ->
+% {not_implemented, RequestLine, Method, RequestURI,ParsedHeader,"HTTP/0.9"};
+% [Method, RequestURI, HTTPVersion] ->
+% {not_implemented, RequestLine, Method, RequestURI,ParsedHeader, HTTPVersion};
+% {bad_request, Reason} ->
+% {bad_request, Reason};
+% Reason ->
+% {bad_request, "Unknown request method"}
+% end.
+
+hsplit(Accu,[]) ->
+ {lists:reverse(Accu), []};
+hsplit(Accu, [ $\r, $\n, $\r, $\n | Tail]) ->
+ {lists:reverse(Accu), Tail};
+hsplit(Accu, [H|T]) ->
+ hsplit([H|Accu],T).
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_request_handler.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_request_handler.erl
new file mode 100644
index 0000000000..5008e6022e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_request_handler.erl
@@ -0,0 +1,995 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_request_handler.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_request_handler).
+
+%% app internal api
+-export([start_link/2, synchronize/3]).
+
+%% module internal api
+-export([connection/2, do_next_connection/6, read_header/7]).
+-export([parse_trailers/1, newline/1]).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+
+%% start_link
+
+start_link(Manager, ConfigDB) ->
+ Pid = proc_lib:spawn(?MODULE, connection, [Manager, ConfigDB]),
+ {ok, Pid}.
+
+
+%% synchronize
+
+synchronize(Pid, SocketType, Socket) ->
+ Pid ! {synchronize, SocketType, Socket}.
+
+% connection
+
+connection(Manager, ConfigDB) ->
+ {SocketType, Socket, {Status, Verbosity}} = await_synchronize(Manager),
+ put(sname,self()),
+ put(verbosity,?vvalidate(Verbosity)),
+ connection1(Status, Manager, ConfigDB, SocketType, Socket).
+
+
+connection1({reject, busy}, Manager, ConfigDB, SocketType, Socket) ->
+ handle_busy(Manager, ConfigDB, SocketType, Socket);
+
+connection1({reject, blocked}, Manager, ConfigDB, SocketType, Socket) ->
+ handle_blocked(Manager, ConfigDB, SocketType, Socket);
+
+connection1(accept, Manager, ConfigDB, SocketType, Socket) ->
+ handle_connection(Manager, ConfigDB, SocketType, Socket).
+
+
+%% await_synchronize
+
+await_synchronize(Manager) ->
+ receive
+ {synchronize, SocketType, Socket} ->
+ ?vlog("received syncronize: "
+ "~n SocketType: ~p"
+ "~n Socket: ~p", [SocketType, Socket]),
+ {SocketType, Socket, httpd_manager:new_connection(Manager)}
+ after 5000 ->
+ exit(synchronize_timeout)
+ end.
+
+
+% handle_busy
+
+handle_busy(Manager, ConfigDB, SocketType, Socket) ->
+ ?vlog("handle busy: ~p", [Socket]),
+ MaxClients = httpd_util:lookup(ConfigDB, max_clients, 150),
+ String = io_lib:format("heavy load (>~w processes)", [MaxClients]),
+ reject_connection(Manager, ConfigDB, SocketType, Socket, String).
+
+
+% handle_blocked
+
+handle_blocked(Manager, ConfigDB, SocketType, Socket) ->
+ ?vlog("handle blocked: ~p", [Socket]),
+ String = "Server maintenance performed, try again later",
+ reject_connection(Manager, ConfigDB, SocketType, Socket, String).
+
+
+% reject_connection
+
+reject_connection(Manager, ConfigDB, SocketType, Socket, Info) ->
+ String = lists:flatten(Info),
+ ?vtrace("send status (503) message", []),
+ httpd_response:send_status(SocketType, Socket, 503, String, ConfigDB),
+ %% This ugly thing is to make ssl deliver the message, before the close...
+ close_sleep(SocketType, 1000),
+ ?vtrace("close the socket", []),
+ close(SocketType, Socket, ConfigDB).
+
+
+% handle_connection
+
+handle_connection(Manager, ConfigDB, SocketType, Socket) ->
+ ?vlog("handle connection: ~p", [Socket]),
+ Resolve = httpd_socket:resolve(SocketType),
+ Peername = httpd_socket:peername(SocketType, Socket),
+ InitData = #init_data{peername=Peername, resolve=Resolve},
+ TimeOut = httpd_util:lookup(ConfigDB, keep_alive_timeout, 150000),
+ NrOfRequest = httpd_util:lookup(ConfigDB, max_keep_alive_request, forever),
+ ?MODULE:do_next_connection(ConfigDB, InitData,
+ SocketType, Socket,NrOfRequest,TimeOut),
+ ?vlog("handle connection: done", []),
+ httpd_manager:done_connection(Manager),
+ ?vlog("handle connection: close socket", []),
+ close(SocketType, Socket, ConfigDB).
+
+
+% do_next_connection
+do_next_connection(_ConfigDB, _InitData, _SocketType, _Socket, NrOfRequests,
+ _Timeout) when NrOfRequests < 1 ->
+ ?vtrace("do_next_connection: done", []),
+ ok;
+do_next_connection(ConfigDB, InitData, SocketType, Socket, NrOfRequests,
+ Timeout) ->
+ Peername = InitData#init_data.peername,
+ case (catch read(ConfigDB, SocketType, Socket, InitData, Timeout)) of
+ {'EXIT', Reason} ->
+ ?vlog("exit reading from socket: ~p",[Reason]),
+ error_logger:error_report({'EXIT',Reason}),
+ String =
+ lists:flatten(
+ io_lib:format("exit reading from socket: ~p => ~n~p~n",
+ [Socket, Reason])),
+ error_log(mod_log,
+ SocketType, Socket, ConfigDB, Peername, String),
+ error_log(mod_disk_log,
+ SocketType, Socket, ConfigDB, Peername, String);
+ {error, Reason} ->
+ handle_read_error(Reason,SocketType,Socket,ConfigDB,Peername);
+ Info when record(Info, mod) ->
+ case Info#mod.connection of
+ true ->
+ ReqTimeout = httpd_util:lookup(ConfigDB,
+ keep_alive_timeout, 150000),
+ ?MODULE:do_next_connection(ConfigDB, InitData,
+ SocketType, Socket,
+ dec(NrOfRequests), ReqTimeout);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end.
+
+
+
+%% read
+read(ConfigDB, SocketType, Socket, InitData, Timeout) ->
+ ?vdebug("read from socket ~p with Timeout ~p",[Socket, Timeout]),
+ MaxHdrSz = httpd_util:lookup(ConfigDB, max_header_size, 10240),
+ case ?MODULE:read_header(SocketType, Socket, Timeout, MaxHdrSz,
+ ConfigDB, InitData, []) of
+ {socket_closed, Reason} ->
+ ?vlog("Socket closed while reading request header: "
+ "~n ~p", [Reason]),
+ socket_close;
+ {error, Error} ->
+ {error, Error};
+ {ok, Info, EntityBodyPart} ->
+ read1(SocketType, Socket, ConfigDB, InitData, Timeout, Info,
+ EntityBodyPart)
+ end.
+
+%% Got the head and maybe a part of the body: read in the rest
+read1(SocketType, Socket, ConfigDB, InitData, Timeout, Info, BodyPart)->
+ MaxBodySz = httpd_util:lookup(ConfigDB, max_body_size, nolimit),
+ ContentLength = content_length(Info),
+ ?vtrace("ContentLength: ~p", [ContentLength]),
+ case read_entity_body(SocketType, Socket, Timeout, MaxBodySz,
+ ContentLength, BodyPart, Info, ConfigDB) of
+ {socket_closed, Reason} ->
+ ?vlog("Socket closed while reading request body: "
+ "~n ~p", [Reason]),
+ socket_close;
+ {ok, EntityBody} ->
+ finish_request(EntityBody, [], Info);
+ {ok, ExtraHeader, EntityBody} ->
+ finish_request(EntityBody, ExtraHeader, Info);
+ Response ->
+ httpd_socket:close(SocketType, Socket),
+ socket_closed
+ %% Catch up all bad return values
+ end.
+
+
+%% The request is read in; send it forward to the module that
+%% generates the response
+
+finish_request(EntityBody, ExtraHeader,
+ #mod{parsed_header = ParsedHeader} = Info)->
+ ?DEBUG("finish_request -> ~n"
+ " EntityBody: ~p~n"
+ " ExtraHeader: ~p~n"
+ " ParsedHeader: ~p~n",
+ [EntityBody, ExtraHeader, ParsedHeader]),
+ httpd_response:send(Info#mod{parsed_header = ParsedHeader ++ ExtraHeader,
+ entity_body = EntityBody}).
+
+
+%% read_header
+
+%% This algorithm relies on the buffer size of the inet driver together
+%% with the {active, once} socket option. At most one message of this
+%% size will be received at a given time. When a full header has been
+%% read, the body is read with the recv function (the body size is known).
+%%
+read_header(SocketType, Socket, Timeout, MaxHdrSz, ConfigDB,
+ InitData, SoFar0) ->
+ T = t(),
+    %% remove any newlines at the beginning, they might be crap from ?
+ SoFar = remove_newline(SoFar0),
+
+ case terminated_header(MaxHdrSz, SoFar) of
+ {true, Header, EntityBodyPart} ->
+ ?vdebug("read_header -> done reading header: "
+ "~n length(Header): ~p"
+ "~n length(EntityBodyPart): ~p",
+ [length(Header), length(EntityBodyPart)]),
+ transform_header(SocketType, Socket, Header, ConfigDB, InitData,
+ EntityBodyPart);
+ false ->
+ ?vtrace("read_header -> "
+ "~n set active = 'once' and "
+ "await a chunk of the header", []),
+
+ case httpd_socket:active_once(SocketType, Socket) of
+ ok ->
+ receive
+ %%
+ %% TCP
+ %%
+ {tcp, Socket, Data} ->
+ ?vtrace("read_header(ip) -> got some data: ~p",
+ [sz(Data)]),
+ ?MODULE:read_header(SocketType, Socket,
+ Timeout - (t()-T),
+ MaxHdrSz, ConfigDB,
+ InitData, SoFar ++ Data);
+ {tcp_closed, Socket} ->
+ ?vtrace("read_header(ip) -> socket closed",[]),
+ {socket_closed,normal};
+ {tcp_error, Socket, Reason} ->
+ ?vtrace("read_header(ip) -> socket error: ~p",
+ [Reason]),
+ {socket_closed, Reason};
+
+ %%
+ %% SSL
+ %%
+ {ssl, Socket, Data} ->
+ ?vtrace("read_header(ssl) -> got some data: ~p",
+ [sz(Data)]),
+ ?MODULE:read_header(SocketType, Socket,
+ Timeout - (t()-T),
+ MaxHdrSz, ConfigDB,
+ InitData, SoFar ++ Data);
+ {ssl_closed, Socket} ->
+ ?vtrace("read_header(ssl) -> socket closed", []),
+ {socket_closed, normal};
+ {ssl_error, Socket, Reason} ->
+ ?vtrace("read_header(ssl) -> socket error: ~p",
+ [Reason]),
+ {socket_closed, Reason}
+
+ after Timeout ->
+ ?vlog("read_header -> timeout", []),
+ {socket_closed, timeout}
+ end;
+
+ Error ->
+ httpd_response:send_status(SocketType, Socket,
+ 500, none, ConfigDB),
+ Error
+ end
+ end.
+
+
+terminated_header(MaxHdrSz, Data) ->
+ D1 = lists:flatten(Data),
+ ?vtrace("terminated_header -> Data size: ~p",[sz(D1)]),
+ case hsplit(MaxHdrSz,[],D1) of
+ not_terminated ->
+ false;
+ [Header, EntityBodyPart] ->
+ {true, Header++"\r\n\r\n",EntityBodyPart}
+ end.
+
+
+transform_header(SocketType, Socket, Request, ConfigDB, InitData, BodyPart) ->
+ case httpd_parse:request_header(Request) of
+ {not_implemented, RequestLine, Method, RequestURI, ParsedHeader,
+ HTTPVersion} ->
+ httpd_response:send_status(SocketType, Socket, 501,
+ {Method, RequestURI, HTTPVersion},
+ ConfigDB),
+ {error,"Not Implemented"};
+ {bad_request, {forbidden, URI}} ->
+ httpd_response:send_status(SocketType, Socket, 403, URI, ConfigDB),
+ {error,"Forbidden Request"};
+ {bad_request, Reason} ->
+ httpd_response:send_status(SocketType, Socket, 400, none,
+ ConfigDB),
+ {error,"Malformed request"};
+ {ok,[Method, RequestURI, HTTPVersion, RequestLine, ParsedHeader]} ->
+ ?DEBUG("send -> ~n"
+ " Method: ~p~n"
+ " RequestURI: ~p~n"
+ " HTTPVersion: ~p~n"
+ " RequestLine: ~p~n",
+ [Method, RequestURI, HTTPVersion, RequestLine]),
+ {ok, Info} =
+ httpd_parse:get_request_record(Socket, SocketType, ConfigDB,
+ Method, RequestURI, HTTPVersion,
+ RequestLine, ParsedHeader,
+ [], InitData),
+ %% Control that the Host header field is provided
+ case Info#mod.absolute_uri of
+ nohost ->
+ case Info#mod.http_version of
+ "HTTP/1.1" ->
+ httpd_response:send_status(Info, 400, none),
+ {error,"No host specified"};
+ _ ->
+ {ok, Info, BodyPart}
+ end;
+ _ ->
+ {ok, Info, BodyPart}
+ end
+ end.
+
+
+hsplit(_MaxHdrSz, Accu,[]) ->
+ not_terminated;
+hsplit(_MaxHdrSz, Accu, [ $\r, $\n, $\r, $\n | Tail]) ->
+ [lists:reverse(Accu), Tail];
+hsplit(nolimit, Accu, [H|T]) ->
+ hsplit(nolimit,[H|Accu],T);
+hsplit(MaxHdrSz, Accu, [H|T]) when length(Accu) < MaxHdrSz ->
+ hsplit(MaxHdrSz,[H|Accu],T);
+hsplit(MaxHdrSz, Accu, D) ->
+ throw({error,{header_too_long,length(Accu),length(D)}}).
+
+
+
+%%----------------------------------------------------------------------
+%% The HTTP/1.1 standard, chapter 8.2.3, says that a request containing
+%% an Expect header field must be responded to with 100 (Continue) by
+%% the server before the client sends the body.
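+%%
+%% For example, an HTTP/1.1 client may send (illustrative values)
+%%     POST /upload HTTP/1.1
+%%     Expect: 100-continue
+%%     Content-Length: 12345
+%% and the server then answers "HTTP/1.1 100 Continue" before the body is
+%% transmitted, or rejects the request up front (e.g. with 417).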
+%%----------------------------------------------------------------------
+
+read_entity_body(SocketType, Socket, Timeout, Max, Length, BodyPart, Info,
+ ConfigDB) when integer(Max) ->
+ case expect(Info#mod.http_version, Info#mod.parsed_header, ConfigDB) of
+ continue when Max > Length ->
+ ?DEBUG("read_entity_body()->100 Continue ~n", []),
+ httpd_response:send_status(Info, 100, ""),
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ continue when Max < Length ->
+ httpd_response:send_status(Info, 417, "Body to big"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect denied according to size"};
+ break ->
+ httpd_response:send_status(Info, 417, "Method not allowed"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect conditions was not fullfilled"};
+ no_expect_header ->
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ http_1_0_expect_header ->
+ httpd_response:send_status(Info, 400,
+ "Only HTTP/1.1 Clients "
+ "may use the Expect Header"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Due to a HTTP/1.0 expect header"}
+ end;
+
+read_entity_body(SocketType, Socket, Timeout, Max, Length, BodyPart,
+ Info, ConfigDB) ->
+ case expect(Info#mod.http_version, Info#mod.parsed_header, ConfigDB) of
+ continue ->
+ ?DEBUG("read_entity_body() -> 100 Continue ~n", []),
+ httpd_response:send_status(Info, 100, ""),
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ break->
+ httpd_response:send_status(Info, 417, "Method not allowed"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect conditions was not fullfilled"};
+ no_expect_header ->
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ http_1_0_expect_header ->
+ httpd_response:send_status(Info, 400,
+ "HTTP/1.0 Clients are not allowed "
+ "to use the Expect Header"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect header field in an HTTP/1.0 request"}
+ end.
+
+%%----------------------------------------------------------------------
+%% Check whether the body is transfer-encoded
+%%----------------------------------------------------------------------
+read_entity_body2(SocketType, Socket, Timeout, Max, Length, BodyPart,
+ Info, ConfigDB) ->
+ ?DEBUG("read_entity_body2() -> "
+ "~n Max: ~p"
+ "~n Length: ~p"
+ "~n Socket: ~p", [Max, Length, Socket]),
+
+ case transfer_coding(Info) of
+ {chunked, ChunkedData} ->
+ ?DEBUG("read_entity_body2() -> "
+ "Transfer-encoding: Chunked Data: BodyPart ~s", [BodyPart]),
+ read_chunked_entity(Info, Timeout, Max, Length, ChunkedData, [],
+ BodyPart);
+ unknown_coding ->
+ ?DEBUG("read_entity_body2() -> Transfer-encoding: Unknown",[]),
+ httpd_response:send_status(Info, 501, "Unknown Transfer-Encoding"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect conditions was not fullfilled"};
+ none ->
+ ?DEBUG("read_entity_body2() -> Transfer-encoding: none",[]),
+ read_entity_body(SocketType, Socket, Timeout, Max, Length,
+ BodyPart)
+ end.
+
+
+%%----------------------------------------------------------------------
+%% The body was plain; read it from the socket
+%% ----------------------------------------------------------------------
+read_entity_body(_SocketType, _Socket, _Timeout, _Max, 0, _BodyPart) ->
+ {ok, []};
+
+read_entity_body(_SocketType, _Socket, _Timeout, Max, Len, _BodyPart)
+ when Max < Len ->
+ ?vlog("body to long: "
+ "~n Max: ~p"
+ "~n Len: ~p", [Max,Len]),
+ throw({error,{body_too_long,Max,Len}});
+
+%% OTP-4409: Fixing POST problem
+read_entity_body(_,_,_,_, Len, BodyPart) when Len == length(BodyPart) ->
+ ?vtrace("read_entity_body -> done when"
+ "~n Len = length(BodyPart): ~p", [Len]),
+ {ok, BodyPart};
+
+%% OTP-4550: Fix problem with trailing garbage produced by some clients.
+read_entity_body(_, _, _, _, Len, BodyPart) when Len < length(BodyPart) ->
+ ?vtrace("read_entity_body -> done when"
+ "~n Len: ~p"
+ "~n length(BodyPart): ~p", [Len, length(BodyPart)]),
+ {ok, lists:sublist(BodyPart,Len)};
+
+read_entity_body(SocketType, Socket, Timeout, Max, Len, BodyPart) ->
+ ?vtrace("read_entity_body -> entry when"
+ "~n Len: ~p"
+ "~n length(BodyPart): ~p", [Len, length(BodyPart)]),
+ %% OTP-4548:
+ %% The length calculation was previously (inets-2.*) done in the
+ %% read function. As of 3.0 it was removed from read but not
+ %% included here.
+ L = Len - length(BodyPart),
+ case httpd_socket:recv(SocketType, Socket, L, Timeout) of
+ {ok, Body} ->
+ ?vtrace("read_entity_body -> received some data:"
+ "~n length(Body): ~p", [length(Body)]),
+ {ok, BodyPart ++ Body};
+ {error,closed} ->
+ {socket_closed,normal};
+ {error,etimedout} ->
+ {socket_closed, timeout};
+ {error,Reason} ->
+ {socket_closed, Reason};
+ Other ->
+ {socket_closed, Other}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% If the body of the message is encoded using the chunked transfer encoding
+%% it looks something like this:
+%% METHOD URI HTTP/VSN
+%% Transfer-Encoding: chunked
+%% CRLF
+%% ChunkSize
+%% Chunk
+%% ChunkSize
+%% Chunk
+%% 0
+%% Trailer
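+%%
+%% For example, the body "hello world" sent as the two chunks "hello" and
+%% " world" is transmitted as:
+%%
+%%   5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n
+%%
+%% i.e. each chunk is preceded by its size in hex and terminated by CRLF,
+%% and a zero-sized chunk (optionally followed by trailer fields) ends the body.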
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+read_chunked_entity(Info, Timeout, Max, Length, ChunkedData, Body, []) ->
+ ?DEBUG("read_chunked_entity()->:no_chunks ~n", []),
+ read_chunked_entity(Info#mod.socket_type, Info#mod.socket,
+ Timeout, Max, Length, ChunkedData, Body,
+ Info#mod.config_db, Info);
+
+read_chunked_entity(Info, Timeout, Max, Length, ChunkedData, Body, BodyPart) ->
+ %% Get the size
+ ?DEBUG("read_chunked_entity() -> PrefetchedBodyPart: ~p ~n",[BodyPart]),
+ case parse_chunk_size(Info, Timeout, BodyPart) of
+ {ok, Size, NewBodyPart} when Size > 0 ->
+ ?DEBUG("read_chunked_entity() -> Size: ~p ~n", [Size]),
+ case parse_chunked_entity_body(Info, Timeout, Max, length(Body),
+ Size, NewBodyPart) of
+ {ok, Chunk, NewBodyPart1} ->
+ ?DEBUG("read_chunked_entity()->Size: ~p ~n", [Size]),
+ read_chunked_entity(Info, Timeout, Max, Length,
+ ChunkedData, Body ++ Chunk,
+ NewBodyPart1);
+ OK ->
+ httpd_socket:close(Info#mod.socket_type, Info#mod.socket),
+ {socket_closed, error}
+ end;
+ {ok, 0, Trailers} ->
+ ?DEBUG("read_chunked_entity()->Size: 0, Trailers: ~s Body: ~s ~n",
+ [Trailers, Body]),
+ case parse_chunk_trailer(Info, Timeout, Info#mod.config_db,
+ Trailers) of
+ {ok, TrailerFields} ->
+ {ok, TrailerFields, Body};
+ _->
+ {ok, []}
+ end;
+ Error ->
+ Error
+ end.
+
+
+parse_chunk_size(Info, Timeout, BodyPart) ->
+ case httpd_util:split(remove_newline(BodyPart), "\r\n", 2) of
+ {ok, [Size, Body]} ->
+ ?DEBUG("parse_chunk_size()->Size: ~p ~n", [Size]),
+ {ok, httpd_util:hexlist_to_integer(Size), Body};
+ {ok, [Size]} ->
+ ?DEBUG("parse_chunk_size()->Size: ~p ~n", [Size]),
+ Sz = get_chunk_size(Info#mod.socket_type,
+ Info#mod.socket, Timeout,
+ lists:reverse(Size)),
+ {ok, Sz, []}
+ end.
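+
+%% For example, if the prefetched data starts with a complete size line:
+%%   parse_chunk_size(Info, Timeout, "19\r\nRestOfChunk") -> {ok, 25, "RestOfChunk"}
+%% since the chunk size is given in hex (16#19 = 25).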
+
+%%----------------------------------------------------------------------
+%% We got the chunk size; get the chunk
+%%
+%% Max:    Max number of bytes to read (may also be undefined)
+%% Length: Number of bytes already read
+%% Size:   Number of bytes to read for the chunk
+%%----------------------------------------------------------------------
+
+%% body too big
+parse_chunked_entity_body(Info, Timeout, Max, Length, Size, BodyPart)
+ when Max =< (Length + Size) ->
+ {error, body_to_big};
+
+%% Prefetched body part is bigger than the current chunk
+%% (i.e. BodyPart includes more than one chunk)
+parse_chunked_entity_body(Info, Timeout, Max, Length, Size, BodyPart)
+ when (Size+2) =< length(BodyPart) ->
+ Chunk = string:substr(BodyPart, 1, Size),
+ Rest = string:substr(BodyPart, Size+3),
+ ?DEBUG("parse_chunked_entity_body() -> ~nChunk: ~s ~nRest: ~s ~n",
+ [Chunk, Rest]),
+ {ok, Chunk, Rest};
+
+
+%% We just got a part of the current chunk
+parse_chunked_entity_body(Info, Timeout, Max, Length, Size, BodyPart) ->
+ %% OTP-4551:
+ %% Subtracting BodyPart from Size does not produce an integer
+ %% when BodyPart is a list...
+ Remaining = Size - length(BodyPart),
+ LastPartOfChunk = read_chunked_entity_body(Info#mod.socket_type,
+ Info#mod.socket,
+ Timeout, Max,
+ Length, Remaining),
+ %% Remove newline
+ httpd_socket:recv(Info#mod.socket_type, Info#mod.socket, 2, Timeout),
+ ?DEBUG("parse_chunked_entity_body() -> "
+ "~nBodyPart: ~s"
+ "~nLastPartOfChunk: ~s ~n",
+ [BodyPart, LastPartOfChunk]),
+ {ok, BodyPart ++ LastPartOfChunk, []}.
+
+
+%%----------------------------------------------------------------------
+%% If the data we got along with the header contained the whole chunked body,
+%% it may as well contain the trailer :-(
+%%----------------------------------------------------------------------
+%% Either the trailer begins with \r\n and then all data is there, or
+%% the trailer has data; then read up to \r\n\r\n
+parse_chunk_trailer(Info,Timeout,ConfigDB,"\r\n")->
+ {ok,[]};
+parse_chunk_trailer(Info,Timeout,ConfigDB,Trailers) ->
+ ?DEBUG("parse_chunk_trailer()->Trailers: ~s ~n", [Trailers]),
+ case string:rstr(Trailers,"\r\n\r\n") of
+ 0 ->
+ MaxHdrSz=httpd_util:lookup(ConfigDB, max_header_size, 10240),
+ read_trailer_end(Info,Timeout,MaxHdrSz,Trailers);
+ _->
+	    %% We got the whole trailer; parse it up
+ parse_trailers(Trailers)
+ end.
+
+parse_trailers(Trailer)->
+ ?DEBUG("parse_trailer()->Trailer: ~s",[Trailer]),
+ {ok,[Fields0|Crap]}=httpd_util:split(Trailer,"\r\n\r\n",2),
+ Fields=string:tokens(Fields0,"\r\n"),
+ [getTrailerField(X)||X<-Fields,lists:member($:,X)].
+
+
+read_trailer_end(Info,Timeout,MaxHdrSz,[])->
+ ?DEBUG("read_trailer_end()->[]",[]),
+ case read_trailer(Info#mod.socket_type,Info#mod.socket,
+ Timeout,MaxHdrSz,[],[],
+ httpd_util:key1search(Info#mod.parsed_header,"trailer",[])) of
+ {ok,Trailers}->
+ Trailers;
+ _->
+ []
+ end;
+read_trailer_end(Info,Timeout,MaxHdrSz,Trailers)->
+ ?DEBUG("read_trailer_end()->Trailers: ~s ~n ",[Trailers]),
+    %% Get the last part of the last header field
+ End=lists:reverse(lists:takewhile(fun(X)->case X of 10 ->false;13->false;_ ->true end end,lists:reverse(Trailers))),
+ Fields0=regexp:split(Trailers,"\r\n"),
+ %%Get rid of the last header field
+ [_Last|Fields]=lists:reverse(Fields0),
+ Headers=[getTrailerField(X)||X<-Fields,lists:member($:,X)],
+ case read_trailer(Info#mod.socket_type,Info#mod.socket,
+ Timeout,MaxHdrSz,Headers,End,
+ httpd_util:key1search(Info#mod.parsed_header,"trailer",[])) of
+ {ok,Trailers}->
+ Trailers;
+ _->
+ []
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% The code below is a good way to read in chunked encoding, but
+%% it requires that the encoding comes from a stream and not from a list
+%%&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+
+%%----------------------------------------------------------------------
+%% The body is encoded with chunked encoding; read it in
+%% ChunkedData: Chunked extensions
+%% Body:        The chunked body read in so far
+%% Max:         Max number of bytes to read
+%% Length:      Number of bytes already read
+%% Size:        Number of bytes to read for the chunk
+%%----------------------------------------------------------------------
+
+
+
+read_chunked_entity(SocketType, Socket, Timeout, Max, Length, ChunkedData,
+ Body, ConfigDB, Info) ->
+ T = t(),
+ case get_chunk_size(SocketType,Socket,Timeout,[]) of
+ Size when integer(Size), Size>0 ->
+ case read_chunked_entity_body(SocketType, Socket,
+ Timeout-(t()-T),
+ Max, length(Body), Size) of
+ {ok,Chunk} ->
+ ?DEBUG("read_chunked_entity/9 Got a chunk: ~p " ,[Chunk]),
+ %% Two bytes are left of the chunk, that is the CRLF
+ %% at the end that is not a part of the message
+ %% So we read it and do nothing with it.
+ httpd_socket:recv(SocketType,Socket,2,Timeout-(t()-T)),
+ read_chunked_entity(SocketType, Socket, Timeout-(t()-T),
+ Max, Length, ChunkedData, Body++Chunk,
+ ConfigDB, Info);
+ Error ->
+ ?DEBUG("read_chunked_entity/9 Error: ~p " ,[Error]),
+ httpd_socket:close(SocketType,Socket),
+ {socket_closed,error}
+ end;
+ Size when integer(Size), Size == 0 ->
+ %% Must read in any trailer fields here
+ read_chunk_trailer(SocketType, Socket, Timeout,
+ Max, Info, ChunkedData, Body, ConfigDB);
+ Error ->
+ Error
+ end.
+
+
+%% If a user wants to send header data after the chunked data we
+%% must pick it out
+read_chunk_trailer(SocketType, Socket, Timeout, Max, Info, ChunkedData,
+ Body, ConfigDB) ->
+ ?DEBUG("read_chunk_trailer/8: ~p " ,[Body]),
+ MaxHdrSz = httpd_util:lookup(ConfigDB,max_header_size,10240),
+ case httpd_util:key1search(Info#mod.parsed_header,"trailer")of
+ undefined ->
+ {ok,Body};
+ Fields ->
+ case read_trailer(SocketType, Socket, Timeout,
+ MaxHdrSz, [], [],
+ string:tokens(
+ httpd_util:to_lower(Fields),",")) of
+ {ok,[]} ->
+ {ok,Body};
+ {ok,HeaderFields} ->
+ % ParsedExtraHeaders =
+ % httpd_parse:tagup_header(httpd_parse:split_lines(HeaderFields)),
+ {ok,HeaderFields,Body};
+ Error ->
+ Error
+ end
+ end.
+
+read_chunked_entity_body(SocketType, Socket, Timeout, Max, Length, Size)
+ when integer(Max) ->
+ read_entity_body(SocketType, Socket, Timeout, Max-Length, Size, []);
+
+read_chunked_entity_body(SocketType, Socket, Timeout, Max, _Length, Size) ->
+ read_entity_body(SocketType, Socket, Timeout, Max, Size, []).
+
+%% If we read in the \r\n, httpd_util:hexlist_to_integer
+%% will ignore it, so we get rid of it immediately :-)
+get_chunk_size(SocketType, Socket, Timeout, Size) ->
+ T = t(),
+ ?DEBUG("get_chunk_size: ~p " ,[Size]),
+ case httpd_socket:recv(SocketType,Socket,1,Timeout) of
+ {ok,[Digit]} when Digit==$\n ->
+ httpd_util:hexlist_to_integer(lists:reverse(Size));
+ {ok,[Digit]} ->
+ get_chunk_size(SocketType,Socket,Timeout-(t()-T),[Digit|Size]);
+ {error,closed} ->
+ {socket_closed,normal};
+ {error,etimedout} ->
+ {socket_closed, timeout};
+ {error,Reason} ->
+ {socket_closed, Reason};
+ Other ->
+ {socket_closed,Other}
+ end.
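+
+%% For example, for a chunk-size line such as "1A\r\n" the characters are
+%% accumulated in reverse until the "\n" is seen; the trailing "\r" is
+%% ignored by httpd_util:hexlist_to_integer/1, so the call amounts to
+%%
+%%   26 = httpd_util:hexlist_to_integer("1A\r")
+%%
+%% i.e. the hexadecimal chunk size 1A becomes the integer 26.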
+
+
+
+
+%%----------------------------------------------------------------------
+%% Reads the HTTP-trailer
+%% It would be easy to tweak read_head to do this, but this way
+%% the chunked-encoding handling is easier to update separately.
+%%----------------------------------------------------------------------
+
+
+%% When end is reached
+%% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,Last,[]) ->
+%% {ok,Headers};
+
+%% When the header is too big
+read_trailer(_,_,_,MaxHdrSz,Headers,Bs,_Fields)
+ when MaxHdrSz < length(Headers) ->
+    ?vlog("header too long: "
+ "~n MaxHdrSz: ~p"
+ "~n length(Bs): ~p", [MaxHdrSz,length(Bs)]),
+ throw({error,{header_too_long,MaxHdrSz,length(Bs)}});
+
+%% The last CRLF is there
+read_trailer(_, _, _, _, Headers, [$\n, $\r], _) ->
+ {ok,Headers};
+
+read_trailer(SocketType, Socket, Timeout, MaxHdrSz, Headers,
+ [$\n, $\r|Rest], Fields) ->
+ case getTrailerField(lists:reverse(Rest))of
+ {error,Reason}->
+ {error,"Bad trailer"};
+ {HeaderField,Value}->
+ case lists:member(HeaderField,Fields) of
+ true ->
+ read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+ [{HeaderField,Value} |Headers],[],
+ lists:delete(HeaderField,Fields));
+ false ->
+ read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+ Headers,[],Fields)
+ end
+ end;
+
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,[$\n, $\r|Rest],Fields) ->
+% case Rest of
+% [] ->
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,Rest,Fields);
+% Field ->
+% case getTrailerField(lists:reverse(Rest))of
+% {error,Reason}->
+% {error,"Bad trailer"};
+% {HeaderField,Value}->
+% case lists:member(HeaderField,Fields) of
+% true ->
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+% [{HeaderField,Value} |Headers],[],
+% lists:delete(HeaderField,Fields));
+% false ->
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+% Headers,[],Fields)
+% end
+% end
+% end;
+
+read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,Bs,Fields) ->
+ %% ?vlog("read_header -> entry with Timeout: ~p",[Timeout]),
+ T = t(),
+ case (catch httpd_socket:recv(SocketType,Socket,1,Timeout)) of
+ {ok,[B]} ->
+ read_trailer(SocketType, Socket, Timeout-(t()-T),
+ MaxHdrSz, Headers, [B|Bs], Fields);
+ {error,closed} ->
+ {socket_closed,normal};
+ {error,etimedout} ->
+ {socket_closed, timeout};
+ {error,Reason} ->
+ {socket_closed, Reason};
+ Other ->
+ {socket_closed,Other}
+ end.
+
+getTrailerField(HeaderField)->
+ case string:str(HeaderField,":") of
+ 0->
+ {error,"badheaderfield"};
+ Number ->
+ {httpd_util:to_lower(string:substr(HeaderField,1,Number-1)),
+ httpd_util:to_lower(string:substr(HeaderField,Number+1))}
+ end.
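+
+%% A small usage sketch (X-Checksum is just a made-up trailer name):
+%%
+%%   {"x-checksum","abc"} = getTrailerField("X-Checksum:abc")
+%%
+%% Field name and value are both lower-cased; note that any space after
+%% the colon is kept as part of the value.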
+
+
+
+
+%% Time in milliseconds
+t() ->
+ {A,B,C} = erlang:now(),
+ A*1000000000+B*1000+(C div 1000).
+
+%%----------------------------------------------------------------------
+%% If the user sends an Expect header field with the value 100-continue,
+%% we must send a 100 status message if it is an HTTP/1.1 client.
+
+%% If it is an HTTP/1.0 client it is a little more difficult.
+%% If Expect is not defined it is easy, but otherwise the standard is not
+%% clear on whether we should break the connection or wait for data.
+%%----------------------------------------------------------------------
+expect(HTTPVersion,ParsedHeader,ConfigDB)->
+ case HTTPVersion of
+ [$H,$T,$T,$P,$\/,$1,$.,N|_Whatever]when N>=1->
+ case httpd_util:key1search(ParsedHeader,"expect") of
+ "100-continue" ->
+ continue;
+ undefined ->
+ no_expect_header;
+ NewValue ->
+ break
+ end;
+ _OldVersion ->
+ case httpd_util:key1search(ParsedHeader,"expect") of
+ undefined ->
+ no_expect_header;
+ NewValue ->
+ case httpd_util:lookup(ConfigDB,expect,continue) of
+ continue->
+ no_expect_header;
+ _ ->
+ http_1_0_expect_header
+ end
+ end
+ end.
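+
+%% Rough usage sketch (ParsedHeader as produced by the request parser;
+%% ConfigDB is not consulted in these particular cases):
+%%
+%%   continue         = expect("HTTP/1.1", [{"expect","100-continue"}], ConfigDB)
+%%   no_expect_header = expect("HTTP/1.1", [], ConfigDB)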
+
+
+%%----------------------------------------------------------------------
+%% According to the HTTP/1.1 standard all applications must understand
+%% chunked-encoded data (last line of chapter 3.6.1).
+transfer_coding(#mod{parsed_header = Ph}) ->
+ case httpd_util:key1search(Ph, "transfer-encoding", none) of
+ none ->
+ none;
+ [$c,$h,$u,$n,$k,$e,$d|Data]->
+ {chunked,Data};
+ _ ->
+ unknown_coding
+ end.
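+
+%% Rough usage sketch, assuming the #mod record from httpd.hrl:
+%%
+%%   {chunked,[]}   = transfer_coding(#mod{parsed_header =
+%%                        [{"transfer-encoding","chunked"}]})
+%%   none           = transfer_coding(#mod{parsed_header = []})
+%%   unknown_coding = transfer_coding(#mod{parsed_header =
+%%                        [{"transfer-encoding","gzip"}]})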
+
+
+
+handle_read_error({header_too_long,Max,Rem},
+ SocketType,Socket,ConfigDB,Peername) ->
+ String = io_lib:format("header too long: ~p : ~p",[Max,Rem]),
+ handle_read_error(ConfigDB,String,SocketType,Socket,Peername,
+ max_header_action,close);
+handle_read_error({body_too_long,Max,Actual},
+ SocketType,Socket,ConfigDB,Peername) ->
+ String = io_lib:format("body too long: ~p : ~p",[Max,Actual]),
+ handle_read_error(ConfigDB,String,SocketType,Socket,Peername,
+ max_body_action,close);
+handle_read_error(Error,SocketType,Socket,ConfigDB,Peername) ->
+ ok.
+
+
+handle_read_error(ConfigDB, ReasonString, SocketType, Socket, Peername,
+ Item, Default) ->
+ ?vlog("error reading request: ~s",[ReasonString]),
+ E = lists:flatten(
+ io_lib:format("Error reading request: ~s",[ReasonString])),
+ error_log(mod_log, SocketType, Socket, ConfigDB, Peername, E),
+ error_log(mod_disk_log, SocketType, Socket, ConfigDB, Peername, E),
+ case httpd_util:lookup(ConfigDB,Item,Default) of
+ reply414 ->
+ send_read_status(SocketType, Socket, 414, ReasonString, ConfigDB);
+ _ ->
+ ok
+ end.
+
+send_read_status(SocketType, Socket, Code, ReasonString, ConfigDB) ->
+ httpd_response:send_status(SocketType, Socket, Code, ReasonString,
+ ConfigDB).
+
+
+error_log(Mod, SocketType, Socket, ConfigDB, Peername, String) ->
+ Modules = httpd_util:lookup(ConfigDB, modules,
+ [mod_get, mod_head, mod_log]),
+ case lists:member(Mod, Modules) of
+ true ->
+ Mod:error_log(SocketType, Socket, ConfigDB, Peername, String);
+ _ ->
+ ok
+ end.
+
+
+sz(L) when list(L) ->
+ length(L);
+sz(B) when binary(B) ->
+ size(B);
+sz(O) ->
+ {unknown_size,O}.
+
+
+%% Socket utility functions:
+
+close(SocketType, Socket, ConfigDB) ->
+ case httpd_socket:close(SocketType, Socket) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ ?vlog("error while closing socket: ~p",[Reason]),
+ ok
+ end.
+
+close_sleep({ssl, _}, Time) ->
+ sleep(Time);
+close_sleep(_, _) ->
+ ok.
+
+
+sleep(T) -> receive after T -> ok end.
+
+
+dec(N) when integer(N) ->
+ N-1;
+dec(N) ->
+ N.
+
+
+content_length(#mod{parsed_header = Ph}) ->
+ list_to_integer(httpd_util:key1search(Ph, "content-length","0")).
+
+
+remove_newline(List)->
+ lists:dropwhile(fun newline/1,List).
+
+newline($\r) ->
+ true;
+newline($\n) ->
+ true;
+newline(_Sign) ->
+ false.
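+
+%% For example:
+%%
+%%   "rest of body" = remove_newline("\r\nrest of body")
+%%
+%% Only the leading CR/LF characters are dropped; the rest is returned as is.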
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_response.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_response.erl
new file mode 100644
index 0000000000..4c7f8e0c8f
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_response.erl
@@ -0,0 +1,437 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_response.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_response).
+-export([send/1, send_status/3, send_status/5]).
+
+%% code is the key for the status code, e.g. 200, 404, ...
+-define(HTTP11HEADERFIELDS,[content_length, accept_ranges, cache_control, date,
+ pragma, trailer, transfer_encoding, etag, location,
+ retry_after, server, allow,
+ content_encoding, content_language,
+ content_location, content_MD5, content_range,
+ content_type, expires, last_modified]).
+
+-define(HTTP10HEADERFIELDS,[content_length, date, pragma, transfer_encoding,
+ location, server, allow, content_encoding,
+ content_type, last_modified]).
+
+-define(PROCEED_RESPONSE(StatusCode, Info),
+ {proceed,
+ [{response,{already_sent, StatusCode,
+ httpd_util:key1search(Info#mod.data,content_lenght)}}]}).
+
+
+-include("httpd.hrl").
+
+-define(VMODULE,"RESPONSE").
+-include("httpd_verbosity.hrl").
+
+%% send
+
+send(#mod{config_db = ConfigDB} = Info) ->
+ ?vtrace("send -> Request line: ~p", [Info#mod.request_line]),
+ Modules = httpd_util:lookup(ConfigDB,modules,[mod_get, mod_head, mod_log]),
+ case traverse_modules(Info, Modules) of
+ done ->
+ Info;
+ {proceed, Data} ->
+ case httpd_util:key1search(Data, status) of
+ {StatusCode, PhraseArgs, Reason} ->
+ ?vdebug("send -> proceed/status: ~n"
+ "~n StatusCode: ~p"
+ "~n PhraseArgs: ~p"
+ "~n Reason: ~p",
+ [StatusCode, PhraseArgs, Reason]),
+ send_status(Info, StatusCode, PhraseArgs),
+ Info;
+
+ undefined ->
+ case httpd_util:key1search(Data, response) of
+ {already_sent, StatusCode, Size} ->
+ ?vtrace("send -> already sent: "
+ "~n StatusCode: ~p"
+ "~n Size: ~p",
+ [StatusCode, Size]),
+ Info;
+ {response, Header, Body} -> %% New way
+ send_response(Info, Header, Body),
+ Info;
+ {StatusCode, Response} -> %% Old way
+ send_response_old(Info, StatusCode, Response),
+ Info;
+ undefined ->
+ ?vtrace("send -> undefined response", []),
+ send_status(Info, 500, none),
+ Info
+ end
+ end
+ end.
+
+
+%% traverse_modules
+
+traverse_modules(Info,[]) ->
+ {proceed,Info#mod.data};
+traverse_modules(Info,[Module|Rest]) ->
+ case (catch apply(Module,do,[Info])) of
+ {'EXIT', Reason} ->
+ ?vlog("traverse_modules -> exit reason: ~p",[Reason]),
+ String =
+ lists:flatten(
+ io_lib:format("traverse exit from apply: ~p:do => ~n~p",
+ [Module, Reason])),
+ report_error(mod_log, Info#mod.config_db, String),
+ report_error(mod_disk_log, Info#mod.config_db, String),
+ done;
+ done ->
+ done;
+ {break,NewData} ->
+ {proceed,NewData};
+ {proceed,NewData} ->
+ traverse_modules(Info#mod{data=NewData},Rest)
+ end.
+
+%% send_status %%
+
+
+send_status(#mod{socket_type = SocketType,
+ socket = Socket,
+ connection = Conn} = Info, 100, _PhraseArgs) ->
+ ?DEBUG("send_status -> StatusCode: ~p~n",[100]),
+ Header = httpd_util:header(100, Conn),
+ httpd_socket:deliver(SocketType, Socket,
+ [Header, "Content-Length:0\r\n\r\n"]);
+
+send_status(#mod{socket_type = SocketType,
+ socket = Socket,
+ config_db = ConfigDB} = Info, StatusCode, PhraseArgs) ->
+ send_status(SocketType, Socket, StatusCode, PhraseArgs, ConfigDB).
+
+send_status(SocketType, Socket, StatusCode, PhraseArgs, ConfigDB) ->
+ ?DEBUG("send_status -> ~n"
+ " StatusCode: ~p~n"
+ " PhraseArgs: ~p",
+ [StatusCode, PhraseArgs]),
+ Header = httpd_util:header(StatusCode, "text/html", false),
+ ReasonPhrase = httpd_util:reason_phrase(StatusCode),
+ Message = httpd_util:message(StatusCode, PhraseArgs, ConfigDB),
+ Body = get_body(ReasonPhrase, Message),
+ Header1 =
+ Header ++
+ "Content-Length:" ++
+ integer_to_list(length(Body)) ++
+ "\r\n\r\n",
+ httpd_socket:deliver(SocketType, Socket, [Header1, Body]).
+
+
+get_body(ReasonPhrase, Message)->
+ "<HTML>
+ <HEAD>
+ <TITLE>"++ReasonPhrase++"</TITLE>
+ </HEAD>
+ <BODY>
+ <H1>"++ReasonPhrase++"</H1>\n"++Message++"\n</BODY>
+ </HTML>\n".
+
+
+%%% Create a response from the Key/Val tuples in the Head list.
+%%% Body is a tuple {body,Fun(),Args}.
+
+%% send_response
+%% Allowed Fields
+
+% HTTP-Version StatusCode Reason-Phrase
+% *((general-headers
+% response-headers
+% entity-headers)CRLF)
+% CRLF
+% ?(BODY)
+
+% General Header fields
+% ======================
+% Cache-Control cache_control
+% Connection            %% Is set depending on the request
+% Date
+% Pragma
+% Trailer
+% Transfer-Encoding
+
+% Response Header field
+% =====================
+% Accept-Ranges
+% (Age)          Mostly for proxies
+% Etag
+% Location
+% (Proxy-Authenticate) Only for proxies
+% Retry-After
+% Server
+% Vary
+% WWW-Authenticate
+%
+% Entity Header Fields
+% ====================
+% Allow
+% Content-Encoding
+% Content-Language
+% Content-Length
+% Content-Location
+% Content-MD5
+% Content-Range
+% Content-Type
+% Expires
+% Last-Modified
+
+
+send_response(Info, Header, Body) ->
+ ?vtrace("send_response -> (new) entry with"
+ "~n Header: ~p", [Header]),
+ case httpd_util:key1search(Header, code) of
+ undefined ->
+	    %% No status code.
+	    %% Oops, this must be very bad:
+	    %% generate a 404 content not available
+	    send_status(Info, 404, "The file is not available");
+ StatusCode ->
+ case send_header(Info, StatusCode, Header) of
+ ok ->
+ send_body(Info, StatusCode, Body);
+ Error ->
+ ?vlog("head delivery failure: ~p", [Error]),
+ done
+ end
+ end.
+
+
+send_header(#mod{socket_type = Type, socket = Sock,
+ http_version = Ver, connection = Conn} = Info,
+ StatusCode, Head0) ->
+    ?vtrace("send_header -> entry with"
+ "~n Ver: ~p"
+ "~n Conn: ~p", [Ver, Conn]),
+ Head1 = create_header(Ver, Head0),
+ StatusLine = [Ver, " ",
+ io_lib:write(StatusCode), " ",
+ httpd_util:reason_phrase(StatusCode), "\r\n"],
+ Connection = get_connection(Conn, Ver),
+ Head = list_to_binary([StatusLine, Head1, Connection,"\r\n"]),
+ ?vtrace("deliver head", []),
+ httpd_socket:deliver(Type, Sock, Head).
+
+
+send_body(_, _, nobody) ->
+ ?vtrace("send_body -> no body", []),
+ ok;
+
+send_body(#mod{socket_type = Type, socket = Sock},
+ StatusCode, Body) when list(Body) ->
+ ?vtrace("deliver body of size ~p", [length(Body)]),
+ httpd_socket:deliver(Type, Sock, Body);
+
+send_body(#mod{socket_type = Type, socket = Sock} = Info,
+ StatusCode, {Fun, Args}) ->
+ case (catch apply(Fun, Args)) of
+ close ->
+ httpd_socket:close(Type, Sock),
+ done;
+
+ sent ->
+ ?PROCEED_RESPONSE(StatusCode, Info);
+
+ {ok, Body} ->
+ ?vtrace("deliver body", []),
+ case httpd_socket:deliver(Type, Sock, Body) of
+ ok ->
+ ?PROCEED_RESPONSE(StatusCode, Info);
+ Error ->
+ ?vlog("body delivery failure: ~p", [Error]),
+ done
+ end;
+
+ Error ->
+ ?vlog("failure of apply(~p,~p): ~p", [Fun, Args, Error]),
+ done
+ end;
+send_body(I, S, B) ->
+ ?vinfo("BAD ARGS: "
+ "~n I: ~p"
+ "~n S: ~p"
+ "~n B: ~p", [I, S, B]),
+ exit({bad_args, {I, S, B}}).
+
+
+%% Return an HTTP header field that indicates that the
+%% connection will be non-persistent
+get_connection(true,"HTTP/1.0")->
+ "Connection:close\r\n";
+get_connection(false,"HTTP/1.1") ->
+ "Connection:close\r\n";
+get_connection(_,_) ->
+ "".
+
+
+create_header("HTTP/1.1", Data) ->
+ create_header1(?HTTP11HEADERFIELDS, Data);
+create_header(_, Data) ->
+ create_header1(?HTTP10HEADERFIELDS, Data).
+
+create_header1(Fields, Data) ->
+ ?DEBUG("create_header() -> "
+ "~n Fields :~p~n Data: ~p ~n", [Fields, Data]),
+ mapfilter(fun(Field)->
+ transform({Field, httpd_util:key1search(Data, Field)})
+ end, Fields, undefined).
+
+
+%% Do a map and remove the values that evaluate to RemoveVal
+mapfilter(Fun,List,RemoveVal)->
+ mapfilter(Fun,List,[],RemoveVal).
+
+mapfilter(Fun,[],[RemoveVal|Acc],RemoveVal)->
+ Acc;
+mapfilter(Fun,[],Acc,_RemoveVal)->
+ Acc;
+
+mapfilter(Fun,[Elem|Rest],[RemoveVal|Acc],RemoveVal)->
+ mapfilter(Fun,Rest,[Fun(Elem)|Acc],RemoveVal);
+mapfilter(Fun,[Elem|Rest],Acc,RemoveVal)->
+ mapfilter(Fun,Rest,[Fun(Elem)|Acc],RemoveVal).
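+
+%% A small sketch of the behaviour: values mapping to RemoveVal are dropped
+%% and, since the accumulator is never reversed, the surviving results come
+%% out in reverse order of the input, e.g.
+%%
+%%   [6,2] = mapfilter(fun(X) -> 2*X end, [1,2,3], 4)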
+
+
+transform({content_type,undefined})->
+ ["Content-Type:text/plain\r\n"];
+
+transform({date,undefined})->
+ ["Date:",httpd_util:rfc1123_date(),"\r\n"];
+
+transform({date,RFCDate})->
+ ["Date:",RFCDate,"\r\n"];
+
+
+transform({_Key,undefined})->
+ undefined;
+transform({accept_ranges,Value})->
+ ["Accept-Ranges:",Value,"\r\n"];
+transform({cache_control,Value})->
+ ["Cache-Control:",Value,"\r\n"];
+transform({pragma,Value})->
+ ["Pragma:",Value,"\r\n"];
+transform({trailer,Value})->
+ ["Trailer:",Value,"\r\n"];
+transform({transfer_encoding,Value})->
+    ["Transfer-Encoding:",Value,"\r\n"];
+transform({etag,Value})->
+ ["ETag:",Value,"\r\n"];
+transform({location,Value})->
+    ["Location:",Value,"\r\n"];
+transform({server,Value})->
+ ["Server:",Value,"\r\n"];
+transform({allow,Value})->
+ ["Allow:",Value,"\r\n"];
+transform({content_encoding,Value})->
+ ["Content-Encoding:",Value,"\r\n"];
+transform({content_language,Value})->
+ ["Content-Language:",Value,"\r\n"];
+transform({retry_after,Value})->
+ ["Retry-After:",Value,"\r\n"];
+transform({server,Value})->
+ ["Server:",Value,"\r\n"];
+transform({allow,Value})->
+ ["Allow:",Value,"\r\n"];
+transform({content_encoding,Value})->
+ ["Content-Encoding:",Value,"\r\n"];
+transform({content_language,Value})->
+ ["Content-Language:",Value,"\r\n"];
+transform({content_location,Value})->
+ ["Content-Location:",Value,"\r\n"];
+transform({content_length,Value})->
+ ["Content-Length:",Value,"\r\n"];
+transform({content_MD5,Value})->
+ ["Content-MD5:",Value,"\r\n"];
+transform({content_range,Value})->
+ ["Content-Range:",Value,"\r\n"];
+transform({content_type,Value})->
+ ["Content-Type:",Value,"\r\n"];
+transform({expires,Value})->
+ ["Expires:",Value,"\r\n"];
+transform({last_modified,Value})->
+ ["Last-Modified:",Value,"\r\n"].
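+
+%% A few examples of the header lines produced above (as iolists):
+%%
+%%   ["Content-Type:","text/html","\r\n"] = transform({content_type,"text/html"})
+%%   ["Allow:","GET, HEAD","\r\n"]        = transform({allow,"GET, HEAD"})
+%%   undefined                            = transform({etag,undefined})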
+
+
+
+%%----------------------------------------------------------------------
+%% This is the old way of sending data; it is strongly encouraged to
+%% leave this method and move on to the newer form of response
+%% OTP-4408
+%%----------------------------------------------------------------------
+
+send_response_old(#mod{socket_type = Type,
+ socket = Sock,
+ method = "HEAD"} = Info,
+ StatusCode, Response) ->
+ ?vtrace("send_response_old(HEAD) -> entry with"
+ "~n StatusCode: ~p"
+ "~n Response: ~p",
+ [StatusCode,Response]),
+ case httpd_util:split(lists:flatten(Response),"\r\n\r\n|\n\n",2) of
+ {ok, [Head, Body]} ->
+ Header =
+ httpd_util:header(StatusCode,Info#mod.connection) ++
+ "Content-Length:" ++ content_length(Body),
+ httpd_socket:deliver(Type, Sock, [Header,Head,"\r\n"]);
+
+ Error ->
+ send_status(Info, 500, "Internal Server Error")
+ end;
+
+send_response_old(#mod{socket_type = Type,
+ socket = Sock} = Info,
+ StatusCode, Response) ->
+ ?vtrace("send_response_old -> entry with"
+ "~n StatusCode: ~p"
+ "~n Response: ~p",
+ [StatusCode,Response]),
+ case httpd_util:split(lists:flatten(Response),"\r\n\r\n|\n\n",2) of
+ {ok, [_Head, Body]} ->
+ Header =
+ httpd_util:header(StatusCode,Info#mod.connection) ++
+ "Content-Length:" ++ content_length(Body),
+ httpd_socket:deliver(Type, Sock, [Header, Response]);
+
+ {ok, Body} ->
+ Header =
+ httpd_util:header(StatusCode,Info#mod.connection) ++
+ "Content-Length:" ++ content_length(Body) ++ "\r\n",
+ httpd_socket:deliver(Type, Sock, [Header, Response]);
+
+ {error, Reason} ->
+ send_status(Info, 500, "Internal Server Error")
+ end.
+
+content_length(Body)->
+ integer_to_list(httpd_util:flatlength(Body))++"\r\n".
+
+
+report_error(Mod, ConfigDB, Error) ->
+ Modules = httpd_util:lookup(ConfigDB, modules,
+ [mod_get, mod_head, mod_log]),
+ case lists:member(Mod, Modules) of
+ true ->
+ Mod:report_error(ConfigDB, Error);
+ _ ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_socket.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_socket.erl
new file mode 100644
index 0000000000..95dfc5e824
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_socket.erl
@@ -0,0 +1,381 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_socket.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_socket).
+-export([start/1,
+ listen/2, listen/3, accept/2, accept/3,
+ deliver/3, send/3, recv/4,
+ close/2,
+ peername/2, resolve/1, config/1,
+ controlling_process/3,
+ active_once/2]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"SOCKET").
+-include("httpd_verbosity.hrl").
+
+-include_lib("kernel/include/inet.hrl").
+
+%% start -> ok | {error,Reason}
+
+start(ip_comm) ->
+ case inet_db:start() of
+ {ok,_Pid} ->
+ ok;
+ {error,{already_started,_Pid}} ->
+ ok;
+ Error ->
+ Error
+ end;
+start({ssl,_SSLConfig}) ->
+ case ssl:start() of
+ ok ->
+ ok;
+ {ok, _} ->
+ ok;
+ {error,{already_started,_}} ->
+ ok;
+ Error ->
+ Error
+ end.
+
+%% listen
+
+listen(SocketType,Port) ->
+ listen(SocketType,undefined,Port).
+
+listen(ip_comm,Addr,Port) ->
+ ?DEBUG("listening(ip_comm) to port ~p", [Port]),
+ Opt = sock_opt(Addr,[{backlog,128},{reuseaddr,true}]),
+ case gen_tcp:listen(Port,Opt) of
+ {ok,ListenSocket} ->
+ ListenSocket;
+ Error ->
+ Error
+ end;
+listen({ssl,SSLConfig},Addr,Port) ->
+ ?DEBUG("listening(ssl) to port ~p"
+ "~n SSLConfig: ~p", [Port,SSLConfig]),
+ Opt = sock_opt(Addr,SSLConfig),
+ case ssl:listen(Port, Opt) of
+ {ok,ListenSocket} ->
+ ListenSocket;
+ Error ->
+ Error
+ end.
+
+
+sock_opt(undefined,Opt) -> [{packet,0},{active,false}|Opt];
+sock_opt(Addr,Opt) -> [{ip, Addr},{packet,0},{active,false}|Opt].
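+
+%% For example, with no explicit bind address:
+%%
+%%   [{packet,0},{active,false},{backlog,128},{reuseaddr,true}] =
+%%       sock_opt(undefined, [{backlog,128},{reuseaddr,true}])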
+
+%% -define(packet_type_http,true).
+%% -define(packet_type_httph,true).
+
+%% -ifdef(packet_type_http).
+%% sock_opt(undefined,Opt) -> [{packet,http},{active,false}|Opt];
+%% sock_opt(Addr,Opt) -> [{ip, Addr},{packet,http},{active,false}|Opt].
+%% -elif(packet_type_httph).
+%% sock_opt(undefined,Opt) -> [{packet,httph},{active,false}|Opt];
+%% sock_opt(Addr,Opt) -> [{ip, Addr},{packet,httph},{active,false}|Opt].
+%% -else.
+%% sock_opt(undefined,Opt) -> [{packet,0},{active,false}|Opt];
+%% sock_opt(Addr,Opt) -> [{ip, Addr},{packet,0},{active,false}|Opt].
+%% -endif.
+
+
+%% active_once
+
+active_once(Type, Sock) ->
+ active(Type, Sock, once).
+
+active(ip_comm, Sock, Active) ->
+ inet:setopts(Sock, [{active, Active}]);
+active({ssl, _SSLConfig}, Sock, Active) ->
+ ssl:setopts(Sock, [{active, Active}]).
+
+%% accept
+
+accept(A, B) ->
+ accept(A, B, infinity).
+
+
+accept(ip_comm,ListenSocket, T) ->
+ ?DEBUG("accept(ip_comm) on socket ~p", [ListenSocket]),
+ case gen_tcp:accept(ListenSocket, T) of
+ {ok,Socket} ->
+ Socket;
+ Error ->
+ ?vtrace("accept(ip_comm) failed for reason:"
+ "~n Error: ~p",[Error]),
+ Error
+ end;
+accept({ssl,_SSLConfig},ListenSocket, T) ->
+ ?DEBUG("accept(ssl) on socket ~p", [ListenSocket]),
+ case ssl:accept(ListenSocket, T) of
+ {ok,Socket} ->
+ Socket;
+ Error ->
+ ?vtrace("accept(ssl) failed for reason:"
+ "~n Error: ~p",[Error]),
+ Error
+ end.
+
+
+%% controlling_process
+
+controlling_process(ip_comm, Socket, Pid) ->
+ gen_tcp:controlling_process(Socket, Pid);
+controlling_process({ssl, _}, Socket, Pid) ->
+ ssl:controlling_process(Socket, Pid).
+
+
+%% deliver
+
+deliver(SocketType, Socket, IOListOrBinary) ->
+ case send(SocketType, Socket, IOListOrBinary) of
+% {error, einval} ->
+% ?vlog("deliver failed for reason: einval"
+% "~n SocketType: ~p"
+% "~n Socket: ~p"
+% "~n Data: ~p",
+% [SocketType, Socket, type(IOListOrBinary)]),
+% (catch close(SocketType, Socket)),
+% socket_closed;
+ {error, _Reason} ->
+ ?vlog("deliver(~p) failed for reason:"
+ "~n Reason: ~p",[SocketType,_Reason]),
+ (catch close(SocketType, Socket)),
+ socket_closed;
+ _ ->
+ ok
+ end.
+
+% type(L) when list(L) ->
+% {list, L};
+% type(B) when binary(B) ->
+% Decoded =
+% case (catch binary_to_term(B)) of
+% {'EXIT', _} ->
+% %% Oups, not a term, try list
+% case (catch binary_to_list(B)) of
+% %% Oups, not a list either, give up
+% {'EXIT', _} ->
+% {size, size(B)};
+% L ->
+% {list, L}
+% end;
+
+% T ->
+% {term, T}
+% end,
+% {binary, Decoded};
+% type(T) when tuple(T) ->
+% {tuple, T};
+% type(I) when integer(I) ->
+% {integer, I};
+% type(F) when float(F) ->
+% {float, F};
+% type(P) when pid(P) ->
+% {pid, P};
+% type(P) when port(P) ->
+% {port, P};
+% type(R) when reference(R) ->
+% {reference, R};
+% type(T) ->
+% {term, T}.
+
+
+
+send(ip_comm,Socket,Data) ->
+ ?DEBUG("send(ip_comm) -> ~p bytes on socket ~p",[data_size(Data),Socket]),
+ gen_tcp:send(Socket,Data);
+send({ssl,SSLConfig},Socket,Data) ->
+ ?DEBUG("send(ssl) -> ~p bytes on socket ~p",[data_size(Data),Socket]),
+ ssl:send(Socket, Data).
+
+recv(ip_comm,Socket,Length,Timeout) ->
+ ?DEBUG("recv(ip_comm) -> read from socket ~p",[Socket]),
+ gen_tcp:recv(Socket,Length,Timeout);
+recv({ssl,SSLConfig},Socket,Length,Timeout) ->
+ ?DEBUG("recv(ssl) -> read from socket ~p",[Socket]),
+ ssl:recv(Socket,Length,Timeout).
+
+-ifdef(inets_debug).
+data_size(L) when list(L) ->
+ httpd_util:flatlength(L);
+data_size(B) when binary(B) ->
+ size(B);
+data_size(O) ->
+ {unknown_size,O}.
+-endif.
+
+
+%% peername
+
+peername(ip_comm, Socket) ->
+ case inet:peername(Socket) of
+ {ok,{{A,B,C,D},Port}} ->
+ PeerName = integer_to_list(A)++"."++integer_to_list(B)++"."++
+ integer_to_list(C)++"."++integer_to_list(D),
+ ?DEBUG("peername(ip_comm) on socket ~p: ~p",
+ [Socket,{Port,PeerName}]),
+ {Port,PeerName};
+ {error,Reason} ->
+ ?vlog("failed getting peername:"
+ "~n Reason: ~p"
+ "~n Socket: ~p",
+ [Reason,Socket]),
+ {-1,"unknown"}
+ end;
+peername({ssl,_SSLConfig},Socket) ->
+ case ssl:peername(Socket) of
+ {ok,{{A,B,C,D},Port}} ->
+ PeerName = integer_to_list(A)++"."++integer_to_list(B)++"."++
+ integer_to_list(C)++"."++integer_to_list(D),
+ ?DEBUG("peername(ssl) on socket ~p: ~p",
+ [Socket, {Port,PeerName}]),
+ {Port,PeerName};
+ {error,_Reason} ->
+ {-1,"unknown"}
+ end.
+
+%% resolve
+
+resolve(_) ->
+ {ok,Name} = inet:gethostname(),
+ Name.
+
+%% close
+
+close(ip_comm,Socket) ->
+ Res =
+ case (catch gen_tcp:close(Socket)) of
+ ok -> ok;
+ {error,Reason} -> {error,Reason};
+ {'EXIT',{noproc,_}} -> {error,closed};
+ {'EXIT',Reason} -> {error,Reason};
+ Otherwise -> {error,Otherwise}
+ end,
+ ?vtrace("close(ip_comm) result: ~p",[Res]),
+ Res;
+close({ssl,_SSLConfig},Socket) ->
+ Res =
+ case (catch ssl:close(Socket)) of
+ ok -> ok;
+ {error,Reason} -> {error,Reason};
+ {'EXIT',{noproc,_}} -> {error,closed};
+ {'EXIT',Reason} -> {error,Reason};
+ Otherwise -> {error,Otherwise}
+ end,
+ ?vtrace("close(ssl) result: ~p",[Res]),
+ Res.
+
+%% config (debug: {certfile, "/var/tmp/server_root/conf/ssl_server.pem"})
+
+config(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,com_type,ip_comm) of
+ ssl ->
+ case ssl_certificate_file(ConfigDB) of
+ undefined ->
+ {error,
+ ?NICE("Directive SSLCertificateFile "
+ "not found in the config file")};
+ SSLCertificateFile ->
+ {ssl,
+ SSLCertificateFile++
+ ssl_certificate_key_file(ConfigDB)++
+ ssl_verify_client(ConfigDB)++
+ ssl_ciphers(ConfigDB)++
+ ssl_password(ConfigDB)++
+ ssl_verify_depth(ConfigDB)++
+ ssl_ca_certificate_file(ConfigDB)}
+ end;
+ ip_comm ->
+ ip_comm
+ end.
+
+ssl_certificate_file(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_certificate_file) of
+ undefined ->
+ undefined;
+ SSLCertificateFile ->
+ [{certfile,SSLCertificateFile}]
+ end.
+
+ssl_certificate_key_file(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_certificate_key_file) of
+ undefined ->
+ [];
+ SSLCertificateKeyFile ->
+ [{keyfile,SSLCertificateKeyFile}]
+ end.
+
+ssl_verify_client(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_verify_client) of
+ undefined ->
+ [];
+ SSLVerifyClient ->
+ [{verify,SSLVerifyClient}]
+ end.
+
+ssl_ciphers(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_ciphers) of
+ undefined ->
+ [];
+ Ciphers ->
+ [{ciphers, Ciphers}]
+ end.
+
+ssl_password(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_password_callback_module) of
+ undefined ->
+ [];
+ Module ->
+ case httpd_util:lookup(ConfigDB, ssl_password_callback_function) of
+ undefined ->
+ [];
+ Function ->
+ case catch apply(Module, Function, []) of
+ Password when list(Password) ->
+ [{password, Password}];
+ Error ->
+ error_report(ssl_password,Module,Function,Error),
+ []
+ end
+ end
+ end.
+
+ssl_verify_depth(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB, ssl_verify_client_depth) of
+ undefined ->
+ [];
+ Depth ->
+ [{depth, Depth}]
+ end.
+
+ssl_ca_certificate_file(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB, ssl_ca_certificate_file) of
+ undefined ->
+ [];
+ File ->
+ [{cacertfile, File}]
+ end.
+
+
+error_report(Where,M,F,Error) ->
+ error_logger:error_report([{?MODULE, Where}, {apply, {M, F, []}}, Error]).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_sup.erl
new file mode 100644
index 0000000000..fd557c30db
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_sup.erl
@@ -0,0 +1,203 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_sup.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The top supervisor for the inets application
+%%----------------------------------------------------------------------
+
+-module(httpd_sup).
+
+-behaviour(supervisor).
+
+-include("httpd_verbosity.hrl").
+
+%% public
+-export([start/2, start_link/2, start2/2, start_link2/2, stop/1, stop/2, stop2/1]).
+-export([init/1]).
+
+
+-define(D(F, A), io:format("~p:" ++ F ++ "~n", [?MODULE|A])).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% supervisor callback functions
+
+start(ConfigFile, Verbosity) ->
+ case start_link(ConfigFile, Verbosity) of
+ {ok, Pid} ->
+ unlink(Pid),
+ {ok, Pid};
+
+ Else ->
+ Else
+ end.
+
+
+start_link(ConfigFile, Verbosity) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok, ConfigList, Addr, Port} ->
+ Name = make_name(Addr, Port),
+ SupName = {local, Name},
+ supervisor:start_link(SupName, ?MODULE,
+ [ConfigFile, ConfigList,
+ Verbosity, Addr, Port]);
+
+ {error, Reason} ->
+ error_logger:error_report(Reason),
+ {stop, Reason};
+
+ Else ->
+ error_logger:error_report(Else),
+ {stop, Else}
+ end.
+
+
+start2(ConfigList, Verbosity) ->
+ case start_link2(ConfigList, Verbosity) of
+ {ok, Pid} ->
+ unlink(Pid),
+ {ok, Pid};
+
+ Else ->
+ Else
+ end.
+
+
+start_link2(ConfigList, Verbosity) ->
+ case get_addr_and_port2(ConfigList) of
+ {ok, Addr, Port} ->
+ Name = make_name(Addr, Port),
+ SupName = {local, Name},
+ supervisor:start_link(SupName, ?MODULE,
+ [undefined, ConfigList, Verbosity, Addr, Port]);
+
+ {error, Reason} ->
+ error_logger:error_report(Reason),
+ {stop, Reason};
+
+ Else ->
+ error_logger:error_report(Else),
+ {stop, Else}
+ end.
+
+
+
+stop(Pid) when pid(Pid) ->
+ do_stop(Pid);
+stop(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok, _, Addr, Port} ->
+ stop(Addr, Port);
+
+ Error ->
+ Error
+ end;
+stop(StartArgs) ->
+ ok.
+
+
+stop(Addr, Port) when integer(Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ do_stop(Pid),
+ ok;
+ _ ->
+ not_started
+ end.
+
+stop2(ConfigList) when list(ConfigList) ->
+ {ok, Addr, Port} = get_addr_and_port2(ConfigList),
+ stop(Addr, Port).
+
+
+do_stop(Pid) ->
+ exit(Pid, shutdown).
+
+
+init([ConfigFile, ConfigList, Verbosity, Addr, Port]) ->
+ init(ConfigFile, ConfigList, Verbosity, Addr, Port);
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+init(ConfigFile, ConfigList, Verbosity, Addr, Port) ->
+ Flags = {one_for_one, 0, 1},
+ AccSupVerbosity = get_acc_sup_verbosity(Verbosity),
+ MiscSupVerbosity = get_misc_sup_verbosity(Verbosity),
+ Sups = [sup_spec(httpd_acceptor_sup, Addr, Port, AccSupVerbosity),
+ sup_spec(httpd_misc_sup, Addr, Port, MiscSupVerbosity),
+ worker_spec(httpd_manager, Addr, Port, ConfigFile, ConfigList,
+ Verbosity, [gen_server])],
+ {ok, {Flags, Sups}}.
+
+
+sup_spec(Name, Addr, Port, Verbosity) ->
+ {{Name, Addr, Port},
+ {Name, start, [Addr, Port, Verbosity]},
+ permanent, 2000, supervisor, [Name, supervisor]}.
+
+worker_spec(Name, Addr, Port, ConfigFile, ConfigList, Verbosity, Modules) ->
+ {{Name, Addr, Port},
+ {Name, start_link, [ConfigFile, ConfigList, Verbosity]},
+ permanent, 2000, worker, [Name] ++ Modules}.
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_sup",Addr,Port).
+
+
+%% get_addr_and_port
+
+get_addr_and_port(ConfigFile) ->
+ case httpd_conf:load(ConfigFile) of
+ {ok, ConfigList} ->
+ {ok, Addr, Port} = get_addr_and_port2(ConfigList),
+ {ok, ConfigList, Addr, Port};
+ Error ->
+ Error
+ end.
+
+
+get_addr_and_port2(ConfigList) ->
+ Port = httpd_util:key1search(ConfigList, port, 80),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ {ok, Addr, Port}.
+
+get_acc_sup_verbosity(V) ->
+ case key1search(V, all) of
+ undefined ->
+ key1search(V, acceptor_sup_verbosity, ?default_verbosity);
+ Verbosity ->
+ Verbosity
+ end.
+
+
+get_misc_sup_verbosity(V) ->
+ case key1search(V, all) of
+ undefined ->
+ key1search(V, misc_sup_verbosity, ?default_verbosity);
+ Verbosity ->
+ Verbosity
+ end.
+
+
+key1search(L, K) ->
+ httpd_util:key1search(L, K).
+
+key1search(L, K, D) ->
+ httpd_util:key1search(L, K, D).
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_util.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_util.erl
new file mode 100644
index 0000000000..05064a8d38
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_util.erl
@@ -0,0 +1,777 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_util.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_util).
+-export([key1search/2, key1search/3, lookup/2, lookup/3, multi_lookup/2,
+ lookup_mime/2, lookup_mime/3, lookup_mime_default/2,
+ lookup_mime_default/3, reason_phrase/1, message/3, rfc1123_date/0,
+ rfc1123_date/1, day/1, month/1, decode_hex/1, decode_base64/1, encode_base64/1,
+ flatlength/1, split_path/1, split_script_path/1, suffix/1, to_upper/1,
+ to_lower/1, split/3, header/2, header/3, header/4, uniq/1,
+ make_name/2,make_name/3,make_name/4,strip/1,
+ hexlist_to_integer/1,integer_to_hexlist/1,
+ convert_request_date/1,create_etag/1,create_etag/2,getSize/1,
+ response_generated/1]).
+
+%% Since the hexlist conversion functions have lousy names, export encode_hex as a nicer alias
+-export([encode_hex/1]).
+-include("httpd.hrl").
+
+%% key1search
+
+key1search(TupleList,Key) ->
+ key1search(TupleList,Key,undefined).
+
+key1search(TupleList,Key,Undefined) ->
+ case lists:keysearch(Key,1,TupleList) of
+ {value,{Key,Value}} ->
+ Value;
+ false ->
+ Undefined
+ end.
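+
+%% For example:
+%%
+%%   "close"   = key1search([{"connection","close"}], "connection")
+%%   undefined = key1search([], "connection")
+%%   0         = key1search([], "content-length", 0)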
+
+%% lookup
+
+lookup(Table,Key) ->
+ lookup(Table,Key,undefined).
+
+lookup(Table,Key,Undefined) ->
+ case catch ets:lookup(Table,Key) of
+ [{Key,Value}|_] ->
+ Value;
+ _->
+ Undefined
+ end.
+
+%% multi_lookup
+
+multi_lookup(Table,Key) ->
+ remove_key(ets:lookup(Table,Key)).
+
+remove_key([]) ->
+ [];
+remove_key([{_Key,Value}|Rest]) ->
+ [Value|remove_key(Rest)].
+
+%% lookup_mime
+
+lookup_mime(ConfigDB,Suffix) ->
+ lookup_mime(ConfigDB,Suffix,undefined).
+
+lookup_mime(ConfigDB,Suffix,Undefined) ->
+ [{mime_types,MimeTypesDB}|_]=ets:lookup(ConfigDB,mime_types),
+ case ets:lookup(MimeTypesDB,Suffix) of
+ [] ->
+ Undefined;
+ [{Suffix,MimeType}|_] ->
+ MimeType
+ end.
+
+%% lookup_mime_default
+
+lookup_mime_default(ConfigDB,Suffix) ->
+ lookup_mime_default(ConfigDB,Suffix,undefined).
+
+lookup_mime_default(ConfigDB,Suffix,Undefined) ->
+ [{mime_types,MimeTypesDB}|_]=ets:lookup(ConfigDB,mime_types),
+ case ets:lookup(MimeTypesDB,Suffix) of
+ [] ->
+ case ets:lookup(ConfigDB,default_type) of
+ [] ->
+ Undefined;
+ [{default_type,DefaultType}|_] ->
+ DefaultType
+ end;
+ [{Suffix,MimeType}|_] ->
+ MimeType
+ end.
+
+%% reason_phrase
+reason_phrase(100) -> "Continue";
+reason_phrase(101) -> "Switching Protocols";
+reason_phrase(200) -> "OK";
+reason_phrase(201) -> "Created";
+reason_phrase(202) -> "Accepted";
+reason_phrase(204) -> "No Content";
+reason_phrase(205) -> "Reset Content";
+reason_phrase(206) -> "Partial Content";
+reason_phrase(301) -> "Moved Permanently";
+reason_phrase(302) -> "Moved Temporarily";
+reason_phrase(304) -> "Not Modified";
+reason_phrase(400) -> "Bad Request";
+reason_phrase(401) -> "Unauthorized";
+reason_phrase(402) -> "Payment Required";
+reason_phrase(403) -> "Forbidden";
+reason_phrase(404) -> "Not Found";
+reason_phrase(405) -> "Method Not Allowed";
+reason_phrase(408) -> "Request Timeout";
+reason_phrase(411) -> "Length Required";
+reason_phrase(414) -> "Request-URI Too Long";
+reason_phrase(412) -> "Precondition Failed";
+reason_phrase(416) -> "Requested Range Not Satisfiable";
+reason_phrase(417) -> "Expectation Failed";
+reason_phrase(500) -> "Internal Server Error";
+reason_phrase(501) -> "Not Implemented";
+reason_phrase(502) -> "Bad Gateway";
+reason_phrase(503) -> "Service Unavailable";
+reason_phrase(_) -> "Internal Server Error".
+
+%% message
+
+message(301,URL,_) ->
+ "The document has moved <A HREF=\""++URL++"\">here</A>.";
+message(304,_URL,_) ->
+ "The document has not been changed.";
+message(400,none,_) ->
+ "Your browser sent a query that this server could not understand.";
+message(401,none,_) ->
+ "This server could not verify that you
+are authorized to access the document you
+requested. Either you supplied the wrong
+credentials (e.g., bad password), or your
+browser does not understand how to supply
+the credentials required.";
+message(403,RequestURI,_) ->
+ "You do not have permission to access "++RequestURI++" on this server.";
+message(404,RequestURI,_) ->
+ "The requested URL "++RequestURI++" was not found on this server.";
+message(412,none,_) ->
+    "The requested preconditions were false";
+message(414,ReasonPhrase,_) ->
+ "Message "++ReasonPhrase++".";
+message(416,ReasonPhrase,_) ->
+ ReasonPhrase;
+
+message(500,none,ConfigDB) ->
+ ServerAdmin=lookup(ConfigDB,server_admin,"unknown@unknown"),
+ "The server encountered an internal error or
+misconfiguration and was unable to complete
+your request.
+<P>Please contact the server administrator "++ServerAdmin++",
+and inform them of the time the error occurred
+and anything you might have done that may have
+caused the error.";
+message(501,{Method,RequestURI,HTTPVersion},_ConfigDB) ->
+ Method++" to "++RequestURI++" ("++HTTPVersion++") not supported.";
+message(503,String,_ConfigDB) ->
+    "This service is unavailable due to: "++String.
+
+%%convert_rfc_date(Date)->{{YYYY,MM,DD},{HH,MIN,SEC}}
+
+convert_request_date([D,A,Y,DateType|Rest]) ->
+ Func=case DateType of
+ $\, ->
+ fun convert_rfc1123_date/1;
+ $\ ->
+ fun convert_ascii_date/1;
+ _ ->
+ fun convert_rfc850_date/1
+ end,
+ case catch Func([D,A,Y,DateType|Rest])of
+ {ok,Date} ->
+ Date;
+ _Error ->
+ bad_date
+ end.
+
+convert_rfc850_date(DateStr) ->
+ case string:tokens(DateStr," ") of
+ [_WeekDay,Date,Time,_TimeZone|_Rest] ->
+ convert_rfc850_date(Date,Time);
+ _Error ->
+ bad_date
+ end.
+
+convert_rfc850_date([D1,D2,_,M,O,N,_,Y1,Y2|_Rest],[H1,H2,_Col,M1,M2,_Col,S1,S2|_Rest2])->
+ Year=list_to_integer([50,48,Y1,Y2]),
+ Day=list_to_integer([D1,D2]),
+ Month=convert_month([M,O,N]),
+ Hour=list_to_integer([H1,H2]),
+ Min=list_to_integer([M1,M2]),
+ Sec=list_to_integer([S1,S2]),
+ {ok,{{Year,Month,Day},{Hour,Min,Sec}}};
+convert_rfc850_date(_BadDate,_BadTime)->
+ bad_date.
+
+convert_ascii_date([_D,_A,_Y,_SP,M,O,N,_SP,D1,D2,_SP,H1,H2,_Col,M1,M2,_Col,S1,S2,_SP,Y1,Y2,Y3,Y4|_Rest])->
+ Year=list_to_integer([Y1,Y2,Y3,Y4]),
+ Day=case D1 of
+ $\ ->
+ list_to_integer([D2]);
+ _->
+ list_to_integer([D1,D2])
+ end,
+ Month=convert_month([M,O,N]),
+ Hour=list_to_integer([H1,H2]),
+ Min=list_to_integer([M1,M2]),
+ Sec=list_to_integer([S1,S2]),
+ {ok,{{Year,Month,Day},{Hour,Min,Sec}}};
+convert_ascii_date(BadDate)->
+ bad_date.
+convert_rfc1123_date([_D,_A,_Y,_C,_SP,D1,D2,_SP,M,O,N,_SP,Y1,Y2,Y3,Y4,_SP,H1,H2,_Col,M1,M2,_Col,S1,S2|Rest])->
+ Year=list_to_integer([Y1,Y2,Y3,Y4]),
+ Day=list_to_integer([D1,D2]),
+ Month=convert_month([M,O,N]),
+ Hour=list_to_integer([H1,H2]),
+ Min=list_to_integer([M1,M2]),
+ Sec=list_to_integer([S1,S2]),
+ {ok,{{Year,Month,Day},{Hour,Min,Sec}}};
+convert_rfc1123_date(BadDate)->
+ bad_date.
+
+convert_month("Jan")->1;
+convert_month("Feb") ->2;
+convert_month("Mar") ->3;
+convert_month("Apr") ->4;
+convert_month("May") ->5;
+convert_month("Jun") ->6;
+convert_month("Jul") ->7;
+convert_month("Aug") ->8;
+convert_month("Sep") ->9;
+convert_month("Oct") ->10;
+convert_month("Nov") ->11;
+convert_month("Dec") ->12.
+
+
+%% rfc1123_date
+
+rfc1123_date() ->
+ {{YYYY,MM,DD},{Hour,Min,Sec}}=calendar:universal_time(),
+ DayNumber=calendar:day_of_the_week({YYYY,MM,DD}),
+ lists:flatten(io_lib:format("~s, ~2.2.0w ~3.s ~4.4.0w ~2.2.0w:~2.2.0w:~2.2.0w GMT",
+ [day(DayNumber),DD,month(MM),YYYY,Hour,Min,Sec])).
+
+rfc1123_date({{YYYY,MM,DD},{Hour,Min,Sec}}) ->
+ DayNumber=calendar:day_of_the_week({YYYY,MM,DD}),
+ lists:flatten(io_lib:format("~s, ~2.2.0w ~3.s ~4.4.0w ~2.2.0w:~2.2.0w:~2.2.0w GMT",
+ [day(DayNumber),DD,month(MM),YYYY,Hour,Min,Sec])).
+
+%% uniq
+
+uniq([]) ->
+ [];
+uniq([First,First|Rest]) ->
+ uniq([First|Rest]);
+uniq([First|Rest]) ->
+ [First|uniq(Rest)].
+
+
+%% day
+
+day(1) -> "Mon";
+day(2) -> "Tue";
+day(3) -> "Wed";
+day(4) -> "Thu";
+day(5) -> "Fri";
+day(6) -> "Sat";
+day(7) -> "Sun".
+
+%% month
+
+month(1) -> "Jan";
+month(2) -> "Feb";
+month(3) -> "Mar";
+month(4) -> "Apr";
+month(5) -> "May";
+month(6) -> "Jun";
+month(7) -> "Jul";
+month(8) -> "Aug";
+month(9) -> "Sep";
+month(10) -> "Oct";
+month(11) -> "Nov";
+month(12) -> "Dec".
+
+%% decode_hex
+
+decode_hex([$%,Hex1,Hex2|Rest]) ->
+ [hex2dec(Hex1)*16+hex2dec(Hex2)|decode_hex(Rest)];
+decode_hex([First|Rest]) ->
+ [First|decode_hex(Rest)];
+decode_hex([]) ->
+ [].
+
+hex2dec(X) when X>=$0,X=<$9 -> X-$0;
+hex2dec(X) when X>=$A,X=<$F -> X-$A+10;
+hex2dec(X) when X>=$a,X=<$f -> X-$a+10.
+
+%% decode_base64 (DEBUG STRING: QWxhZGRpbjpvcGVuIHNlc2FtZQ==)
+
+decode_base64([]) ->
+ [];
+decode_base64([Sextet1,Sextet2,$=,$=|Rest]) ->
+ Bits2x6=
+ (d(Sextet1) bsl 18) bor
+ (d(Sextet2) bsl 12),
+ Octet1=Bits2x6 bsr 16,
+ [Octet1|decode_base64(Rest)];
+decode_base64([Sextet1,Sextet2,Sextet3,$=|Rest]) ->
+ Bits3x6=
+ (d(Sextet1) bsl 18) bor
+ (d(Sextet2) bsl 12) bor
+ (d(Sextet3) bsl 6),
+ Octet1=Bits3x6 bsr 16,
+ Octet2=(Bits3x6 bsr 8) band 16#ff,
+ [Octet1,Octet2|decode_base64(Rest)];
+decode_base64([Sextet1,Sextet2,Sextet3,Sextet4|Rest]) ->
+ Bits4x6=
+ (d(Sextet1) bsl 18) bor
+ (d(Sextet2) bsl 12) bor
+ (d(Sextet3) bsl 6) bor
+ d(Sextet4),
+ Octet1=Bits4x6 bsr 16,
+ Octet2=(Bits4x6 bsr 8) band 16#ff,
+ Octet3=Bits4x6 band 16#ff,
+ [Octet1,Octet2,Octet3|decode_base64(Rest)];
+decode_base64(CatchAll) ->
+ "BAD!".
+
+d(X) when X >= $A, X =<$Z ->
+ X-65;
+d(X) when X >= $a, X =<$z ->
+ X-71;
+d(X) when X >= $0, X =<$9 ->
+ X+4;
+d($+) -> 62;
+d($/) -> 63;
+d(_) -> 63.
+
+
+encode_base64([]) ->
+ [];
+encode_base64([A]) ->
+ [e(A bsr 2), e((A band 3) bsl 4), $=, $=];
+encode_base64([A,B]) ->
+ [e(A bsr 2), e(((A band 3) bsl 4) bor (B bsr 4)), e((B band 15) bsl 2), $=];
+encode_base64([A,B,C|Ls]) ->
+ encode_base64_do(A,B,C, Ls).
+encode_base64_do(A,B,C, Rest) ->
+ BB = (A bsl 16) bor (B bsl 8) bor C,
+ [e(BB bsr 18), e((BB bsr 12) band 63),
+ e((BB bsr 6) band 63), e(BB band 63)|encode_base64(Rest)].
+
+e(X) when X >= 0, X < 26 -> X+65;
+e(X) when X>25, X<52 -> X+71;
+e(X) when X>51, X<62 -> X-4;
+e(62) -> $+;
+e(63) -> $/;
+e(X) -> exit({bad_encode_base64_token, X}).
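+
+%% The classic HTTP basic-auth example round-trips as expected:
+%%
+%%   "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" = encode_base64("Aladdin:open sesame")
+%%   "Aladdin:open sesame" = decode_base64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")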
+
+
+%% flatlength
+
+flatlength(List) ->
+ flatlength(List, 0).
+
+flatlength([H|T],L) when list(H) ->
+ flatlength(H,flatlength(T,L));
+flatlength([H|T],L) when binary(H) ->
+ flatlength(T,L+size(H));
+flatlength([H|T],L) ->
+ flatlength(T,L+1);
+flatlength([],L) ->
+ L.
+
+%% split_path
+
+split_path(Path) ->
+ case regexp:match(Path,"[\?].*\$") of
+ %% A QUERY_STRING exists!
+ {match,Start,Length} ->
+ {httpd_util:decode_hex(string:substr(Path,1,Start-1)),
+ string:substr(Path,Start,Length)};
+ %% A possible PATH_INFO exists!
+ nomatch ->
+ split_path(Path,[])
+ end.
+
+split_path([],SoFar) ->
+ {httpd_util:decode_hex(lists:reverse(SoFar)),[]};
+split_path([$/|Rest],SoFar) ->
+ Path=httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok,FileInfo} when FileInfo#file_info.type == regular ->
+ {Path,[$/|Rest]};
+ {ok,FileInfo} ->
+ split_path(Rest,[$/|SoFar]);
+ {error,Reason} ->
+ split_path(Rest,[$/|SoFar])
+ end;
+split_path([C|Rest],SoFar) ->
+ split_path(Rest,[C|SoFar]).
+
+%% split_script_path
+
+split_script_path(Path) ->
+ case split_script_path(Path, []) of
+ {Script, AfterPath} ->
+ {PathInfo, QueryString} = pathinfo_querystring(AfterPath),
+ {Script, {PathInfo, QueryString}};
+ not_a_script ->
+ not_a_script
+ end.
+
+pathinfo_querystring(Str) ->
+ pathinfo_querystring(Str, []).
+pathinfo_querystring([], SoFar) ->
+ {lists:reverse(SoFar), []};
+pathinfo_querystring([$?|Rest], SoFar) ->
+ {lists:reverse(SoFar), Rest};
+pathinfo_querystring([C|Rest], SoFar) ->
+ pathinfo_querystring(Rest, [C|SoFar]).
+
+split_script_path([$?|QueryString], SoFar) ->
+ Path = httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok,FileInfo} when FileInfo#file_info.type == regular ->
+ {Path, [$?|QueryString]};
+ {ok,FileInfo} ->
+ not_a_script;
+ {error,Reason} ->
+ not_a_script
+ end;
+split_script_path([], SoFar) ->
+ Path = httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok,FileInfo} when FileInfo#file_info.type == regular ->
+ {Path, []};
+ {ok,FileInfo} ->
+ not_a_script;
+ {error,Reason} ->
+ not_a_script
+ end;
+split_script_path([$/|Rest], SoFar) ->
+ Path = httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok, FileInfo} when FileInfo#file_info.type == regular ->
+ {Path, [$/|Rest]};
+ {ok, _FileInfo} ->
+ split_script_path(Rest, [$/|SoFar]);
+ {error, _Reason} ->
+ split_script_path(Rest, [$/|SoFar])
+ end;
+split_script_path([C|Rest], SoFar) ->
+ split_script_path(Rest,[C|SoFar]).
+
+%% suffix
+
+suffix(Path) ->
+ case filename:extension(Path) of
+ [] ->
+ [];
+ Extension ->
+ tl(Extension)
+ end.
+
+%% to_upper
+
+to_upper([C|Cs]) when C >= $a, C =< $z ->
+ [C-($a-$A)|to_upper(Cs)];
+to_upper([C|Cs]) ->
+ [C|to_upper(Cs)];
+to_upper([]) ->
+ [].
+
+%% to_lower
+
+to_lower([C|Cs]) when C >= $A, C =< $Z ->
+ [C+($a-$A)|to_lower(Cs)];
+to_lower([C|Cs]) ->
+ [C|to_lower(Cs)];
+to_lower([]) ->
+ [].
+
+
+%% strip
+strip(Value)->
+ lists:reverse(remove_ws(lists:reverse(remove_ws(Value)))).
+
+remove_ws([$\s|Rest])->
+ remove_ws(Rest);
+remove_ws([$\t|Rest]) ->
+ remove_ws(Rest);
+remove_ws(Rest) ->
+ Rest.
+
+%% split
+
+split(String,RegExp,Limit) ->
+ case regexp:parse(RegExp) of
+ {error,Reason} ->
+ {error,Reason};
+ {ok,_} ->
+ {ok,do_split(String,RegExp,Limit)}
+ end.
+
+do_split(String,RegExp,1) ->
+ [String];
+
+do_split(String,RegExp,Limit) ->
+ case regexp:first_match(String,RegExp) of
+ {match,Start,Length} ->
+ [string:substr(String,1,Start-1)|
+ do_split(lists:nthtail(Start+Length-1,String),RegExp,Limit-1)];
+ nomatch ->
+ [String]
+ end.
+
+%% header
+header(StatusCode,Date)when list(Date)->
+ header(StatusCode,"text/plain",false);
+
+header(StatusCode, PersistentConnection) when integer(StatusCode)->
+ Date = rfc1123_date(),
+ Connection =
+ case PersistentConnection of
+ true ->
+ "";
+ _ ->
+ "Connection: close \r\n"
+ end,
+ io_lib:format("HTTP/1.1 ~w ~s \r\nDate: ~s\r\nServer: ~s\r\n~s",
+ [StatusCode, httpd_util:reason_phrase(StatusCode),
+ Date, ?SERVER_SOFTWARE, Connection]).
+
+%%----------------------------------------------------------------------
+
+header(StatusCode, MimeType, Date) when list(Date) ->
+ header(StatusCode, MimeType, false,rfc1123_date());
+
+
+header(StatusCode, MimeType, PersistentConnection) when integer(StatusCode) ->
+ header(StatusCode, MimeType, PersistentConnection,rfc1123_date()).
+
+
+%%----------------------------------------------------------------------
+
+header(416, MimeType,PersistentConnection,Date)->
+ Connection =
+ case PersistentConnection of
+ true ->
+ "";
+ _ ->
+ "Connection: close \r\n"
+ end,
+ io_lib:format("HTTP/1.1 ~w ~s \r\nDate: ~s\r\nServer: ~s\r\n"
+ "Content-Range:bytes *"
+ "Content-Type: ~s\r\n~s",
+ [416, httpd_util:reason_phrase(416),
+ Date, ?SERVER_SOFTWARE, MimeType, Connection]);
+
+
+header(StatusCode, MimeType,PersistentConnection,Date) when integer(StatusCode)->
+ Connection =
+ case PersistentConnection of
+ true ->
+ "";
+ _ ->
+ "Connection: close \r\n"
+ end,
+ io_lib:format("HTTP/1.1 ~w ~s \r\nDate: ~s\r\nServer: ~s\r\n"
+ "Content-Type: ~s\r\n~s",
+ [StatusCode, httpd_util:reason_phrase(StatusCode),
+ Date, ?SERVER_SOFTWARE, MimeType, Connection]).
+
+
+
+%% make_name/2, make_name/3
+%% Prefix -> string()
+%% First part of the name, e.g. "httpd"
+%% Addr -> {A,B,C,D} | string() | undefined
+%% The address part of the name.
+%% e.g. "123.234.55.66" or {123,234,55,66} or "otp.ericsson.se"
+%% for a host address or undefined if local host.
+%% Port -> integer()
+%% Last part of the name, such as the HTTPD server port
+%% number (80).
+%% Postfix -> Any string that will be added last to the name
+%%
+%% Example:
+%% make_name("httpd","otp.ericsson.se",80) => httpd__otp_ericsson_se__80
+%% make_name("httpd",undefined,8088) => httpd_8088
+
+make_name(Prefix,Port) ->
+ make_name(Prefix,undefined,Port,"").
+
+make_name(Prefix,Addr,Port) ->
+ make_name(Prefix,Addr,Port,"").
+
+make_name(Prefix,"*",Port,Postfix) ->
+ make_name(Prefix,undefined,Port,Postfix);
+
+make_name(Prefix,any,Port,Postfix) ->
+ make_name1(io_lib:format("~s_~w~s",[Prefix,Port,Postfix]));
+
+make_name(Prefix,undefined,Port,Postfix) ->
+ make_name1(io_lib:format("~s_~w~s",[Prefix,Port,Postfix]));
+
+make_name(Prefix,Addr,Port,Postfix) ->
+ NameString =
+ Prefix ++ "__" ++ make_name2(Addr) ++ "__" ++
+ integer_to_list(Port) ++ Postfix,
+ make_name1(NameString).
+
+make_name1(String) ->
+ list_to_atom(lists:flatten(String)).
+
+make_name2({A,B,C,D}) ->
+ io_lib:format("~w_~w_~w_~w",[A,B,C,D]);
+make_name2(Addr) ->
+ search_and_replace(Addr,$.,$_).
+
+search_and_replace(S,A,B) ->
+ Fun = fun(What) ->
+ case What of
+ A -> B;
+ O -> O
+ end
+ end,
+ lists:map(Fun,S).
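+
+%% For example, the dots of a host name are turned into underscores
+%% (this is what make_name2/1 relies on):
+%%
+%%   "otp_ericsson_se" = search_and_replace("otp.ericsson.se", $., $_)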
+
+
+
+%%----------------------------------------------------------------------
+%% Converts a string that consists of 0-9, A-F, a-f to an
+%% integer
+%%----------------------------------------------------------------------
+
+hexlist_to_integer([])->
+ empty;
+
+
+%% When the string only contains one character it is easily done.
+%% 0-9
+hexlist_to_integer([Size]) when Size>=48 , Size=<57 ->
+ Size-48;
+%% A-F
+hexlist_to_integer([Size]) when Size>=65 , Size=<70 ->
+ Size-55;
+%% a-f
+hexlist_to_integer([Size]) when Size>=97 , Size=<102 ->
+ Size-87;
+hexlist_to_integer([Size]) ->
+ not_a_num;
+
+hexlist_to_integer(Size) ->
+ Len=string:span(Size,"1234567890abcdefABCDEF"),
+ hexlist_to_integer2(Size,16 bsl (4 *(Len-2)),0).
+
+hexlist_to_integer2([],_Pos,Sum)->
+ Sum;
+hexlist_to_integer2([HexVal|HexString],Pos,Sum)when HexVal>=48,HexVal=<57->
+ hexlist_to_integer2(HexString,Pos bsr 4,Sum+((HexVal-48)*Pos));
+
+hexlist_to_integer2([HexVal|HexString],Pos,Sum)when HexVal>=65,HexVal=<70->
+ hexlist_to_integer2(HexString,Pos bsr 4,Sum+((HexVal-55)*Pos));
+
+hexlist_to_integer2([HexVal|HexString],Pos,Sum)when HexVal>=97,HexVal=<102->
+ hexlist_to_integer2(HexString,Pos bsr 4,Sum+((HexVal-87)*Pos));
+
+hexlist_to_integer2(_AfterHexString,_Pos,Sum)->
+ Sum.
+
+%%----------------------------------------------------------------------
+%% Converts an integer to a hexlist
+%%----------------------------------------------------------------------
+encode_hex(Num)->
+ integer_to_hexlist(Num).
+
+
+integer_to_hexlist(Num)->
+ integer_to_hexlist(Num,getSize(Num),[]).
+
+integer_to_hexlist(Num,Pot,Res) when Pot<0 ->
+ convert_to_ascii([Num|Res]);
+
+integer_to_hexlist(Num,Pot,Res) ->
+ Position=(16 bsl (Pot*4)),
+ PosVal=Num div Position,
+ integer_to_hexlist(Num-(PosVal*Position),Pot-1,[PosVal|Res]).
+convert_to_ascii(ReversedNum)->
+    convert_to_ascii(ReversedNum,[]).
+
+convert_to_ascii([],Num)->
+ Num;
+convert_to_ascii([Num|Reversed],Number)when Num>-1, Num<10 ->
+ convert_to_ascii(Reversed,[Num+48|Number]);
+convert_to_ascii([Num|Reversed],Number)when Num>9, Num<16 ->
+ convert_to_ascii(Reversed,[Num+55|Number]);
+convert_to_ascii(NumReversed,Number) ->
+ error.
+
+
+
+getSize(Num)->
+ getSize(Num,0).
+
+getSize(Num,Pot)when Num<(16 bsl(Pot *4)) ->
+ Pot-1;
+
+getSize(Num,Pot) ->
+ getSize(Num,Pot+1).
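+
+%% The two conversions round-trip for ordinary non-negative integers, e.g.
+%%
+%%   "1A" = integer_to_hexlist(26)
+%%   26   = hexlist_to_integer("1A")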
+
+
+
+
+
+create_etag(FileInfo)->
+ create_etag(FileInfo#file_info.mtime,FileInfo#file_info.size).
+
+create_etag({{Year,Month,Day},{Hour,Min,Sec}},Size)->
+ create_part([Year,Month,Day,Hour,Min,Sec])++io_lib:write(Size);
+
+create_etag(FileInfo,Size)->
+ create_etag(FileInfo#file_info.mtime,Size).
+
+create_part(Values)->
+ lists:map(fun(Val0)->
+ Val=Val0 rem 60,
+ if
+ Val=<25 ->
+ 65+Val; % A-Z
+ Val=<50 ->
+ 72+Val; % a-z
+ %% Remaining values (51-59 after rem 60) map to digits
+ true ->
+ Val-3
+ end
+ end,Values).
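+
+%% Illustrative sketch (not in the original R9C source): the ETag is built
+%% by mapping every mtime component (taken rem 60) to one character and
+%% appending the size, so for example
+%%
+%%   create_etag(#file_info{mtime={{2003,1,13},{10,5,59}}, size=1234})
+%%
+%% yields an iolist that flattens to "XBNKF81234".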
+
+
+
+%%----------------------------------------------------------------------
+%% Checks whether a response has already been generated or not
+%%----------------------------------------------------------------------
+response_generated(Info)->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason}->
+ true;
+ %% No status code; check whether a response has been generated
+ undefined ->
+ case httpd_util:key1search(Info#mod.data, response) of
+ %% No response has been generated!
+ undefined ->
+ false;
+ %% A response has been generated or sent!
+ Response ->
+ true
+ end
+ end.
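+
+%% Illustrative sketch (not in the original R9C source): Info#mod.data is
+%% a property list, so for example
+%%
+%%   response_generated(#mod{data=[{status,{404,none,"missing"}}]}) -> true
+%%   response_generated(#mod{data=[{response,AnyResponse}]})        -> true
+%%   response_generated(#mod{data=[]})                              -> false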
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.erl
new file mode 100644
index 0000000000..c772a11dd1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.erl
@@ -0,0 +1,94 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_verbosity.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_verbosity).
+
+-include_lib("stdlib/include/erl_compile.hrl").
+
+-export([print/4,print/5,printc/4,validate/1]).
+
+print(silence,_Severity,_Format,_Arguments) ->
+ ok;
+print(Verbosity,Severity,Format,Arguments) ->
+ print1(printable(Verbosity,Severity),Format,Arguments).
+
+
+print(silence,_Severity,_Module,_Format,_Arguments) ->
+ ok;
+print(Verbosity,Severity,Module,Format,Arguments) ->
+ print1(printable(Verbosity,Severity),Module,Format,Arguments).
+
+
+printc(silence,Severity,Format,Arguments) ->
+ ok;
+printc(Verbosity,Severity,Format,Arguments) ->
+ print2(printable(Verbosity,Severity),Format,Arguments).
+
+
+print1(false,_Format,_Arguments) -> ok;
+print1(Verbosity,Format,Arguments) ->
+ V = image_of_verbosity(Verbosity),
+ S = image_of_sname(get(sname)),
+ io:format("** HTTPD ~s ~s: " ++ Format ++ "~n",[S,V]++Arguments).
+
+print1(false,_Module,_Format,_Arguments) -> ok;
+print1(Verbosity,Module,Format,Arguments) ->
+ V = image_of_verbosity(Verbosity),
+ S = image_of_sname(get(sname)),
+ io:format("** HTTPD ~s ~s ~s: " ++ Format ++ "~n",[S,Module,V]++Arguments).
+
+
+print2(false,_Format,_Arguments) -> ok;
+print2(_Verbosity,Format,Arguments) ->
+ io:format(Format ++ "~n",Arguments).
+
+
+%% printable(Verbosity,Severity)
+printable(info,info) -> info;
+printable(log,info) -> info;
+printable(log,log) -> log;
+printable(debug,info) -> info;
+printable(debug,log) -> log;
+printable(debug,debug) -> debug;
+printable(trace,V) -> V;
+printable(_Verb,_Sev) -> false.
+
+
+image_of_verbosity(info) -> "INFO";
+image_of_verbosity(log) -> "LOG";
+image_of_verbosity(debug) -> "DEBUG";
+image_of_verbosity(trace) -> "TRACE";
+image_of_verbosity(_) -> "".
+
+%% ShortName
+image_of_sname(acc) -> "ACCEPTOR";
+image_of_sname(acc_sup) -> "ACCEPTOR_SUP";
+image_of_sname(auth) -> "AUTH";
+image_of_sname(man) -> "MANAGER";
+image_of_sname(misc_sup) -> "MISC_SUP";
+image_of_sname(sec) -> "SECURITY";
+image_of_sname(P) when pid(P) -> io_lib:format("REQUEST_HANDLER(~p)",[P]);
+image_of_sname(undefined) -> "";
+image_of_sname(V) -> io_lib:format("~p",[V]).
+
+
+validate(info) -> info;
+validate(log) -> log;
+validate(debug) -> debug;
+validate(trace) -> trace;
+validate(_) -> silence.
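+
+%% Illustrative sketch (not in the original R9C source): a caller keeps a
+%% short name and a verbosity level in its process dictionary and logs
+%% through print/4; with the printable/2 mapping above,
+%%
+%%   put(sname, auth), put(verbosity, log),
+%%   httpd_verbosity:print(get(verbosity), info, "user ~p", ["joe"]).
+%%
+%% prints ** HTTPD AUTH INFO: user "joe", while the same call with
+%% verbosity silence, or with severity debug, prints nothing.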
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.hrl
new file mode 100644
index 0000000000..caafd8ef18
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/httpd_verbosity.hrl
@@ -0,0 +1,65 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_verbosity.hrl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+
+-ifndef(dont_use_verbosity).
+
+-ifndef(default_verbosity).
+-define(default_verbosity,silence).
+-endif.
+
+-define(vvalidate(V), httpd_verbosity:validate(V)).
+
+-ifdef(VMODULE).
+
+-define(vinfo(F,A), httpd_verbosity:print(get(verbosity),info, ?VMODULE,F,A)).
+-define(vlog(F,A), httpd_verbosity:print(get(verbosity),log, ?VMODULE,F,A)).
+-define(vdebug(F,A),httpd_verbosity:print(get(verbosity),debug,?VMODULE,F,A)).
+-define(vtrace(F,A),httpd_verbosity:print(get(verbosity),trace,?VMODULE,F,A)).
+
+-else.
+
+-define(vinfo(F,A), httpd_verbosity:print(get(verbosity),info, F,A)).
+-define(vlog(F,A), httpd_verbosity:print(get(verbosity),log, F,A)).
+-define(vdebug(F,A),httpd_verbosity:print(get(verbosity),debug,F,A)).
+-define(vtrace(F,A),httpd_verbosity:print(get(verbosity),trace,F,A)).
+
+-endif.
+
+-define(vinfoc(F,A), httpd_verbosity:printc(get(verbosity),info, F,A)).
+-define(vlogc(F,A), httpd_verbosity:printc(get(verbosity),log, F,A)).
+-define(vdebugc(F,A),httpd_verbosity:printc(get(verbosity),debug,F,A)).
+-define(vtracec(F,A),httpd_verbosity:printc(get(verbosity),trace,F,A)).
+
+-else.
+
+-define(vvalidate(V),ok).
+
+-define(vinfo(F,A),ok).
+-define(vlog(F,A),ok).
+-define(vdebug(F,A),ok).
+-define(vtrace(F,A),ok).
+
+-define(vinfoc(F,A),ok).
+-define(vlogc(F,A),ok).
+-define(vdebugc(F,A),ok).
+-define(vtracec(F,A),ok).
+
+-endif.
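+
+%% Illustrative sketch (not in the original R9C source): a module that
+%% defines ?VMODULE before including this header, as mod_auth does later
+%% in this patch,
+%%
+%%   -define(VMODULE,"AUTH").
+%%   -include("httpd_verbosity.hrl").
+%%
+%% gets ?vtrace("msg ~p",[X]) expanded to
+%% httpd_verbosity:print(get(verbosity), trace, "AUTH", "msg ~p", [X]),
+%% while defining dont_use_verbosity turns all of these macros into ok.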
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.app.src b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.app.src
new file mode 100644
index 0000000000..1bf5fcc56e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.app.src
@@ -0,0 +1,56 @@
+{application,inets,
+ [{description,"INETS CXC 138 49"},
+ {vsn,"%VSN%"},
+ {modules,[
+ %% FTP
+ ftp,
+
+ %% HTTP client:
+ http,
+ http_lib,
+ httpc_handler,
+ httpc_manager,
+ uri,
+
+ %% HTTP server:
+ httpd,
+ httpd_acceptor,
+ httpd_acceptor_sup,
+ httpd_conf,
+ httpd_example,
+ httpd_manager,
+ httpd_misc_sup,
+ httpd_parse,
+ httpd_request_handler,
+ httpd_response,
+ httpd_socket,
+ httpd_sup,
+ httpd_util,
+ httpd_verbosity,
+ inets_sup,
+ mod_actions,
+ mod_alias,
+ mod_auth,
+ mod_auth_dets,
+ mod_auth_mnesia,
+ mod_auth_plain,
+ mod_auth_server,
+ mod_browser,
+ mod_cgi,
+ mod_dir,
+ mod_disk_log,
+ mod_esi,
+ mod_get,
+ mod_head,
+ mod_htaccess,
+ mod_include,
+ mod_log,
+ mod_range,
+ mod_responsecontrol,
+ mod_security,
+ mod_security_server,
+ mod_trace
+ ]},
+ {registered,[inets_sup]},
+ {applications,[kernel,stdlib]},
+ {mod,{inets_sup,[]}}]}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.appup.src b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.appup.src
new file mode 100644
index 0000000000..f612dc5b91
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.appup.src
@@ -0,0 +1,135 @@
+{"%VSN%",
+ [{"3.0.5",
+ [
+ {load_module, ftp, soft_purge, soft_purge, []}
+ ]
+ },
+ {"3.0.4",
+ [
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []}
+ ]
+ },
+ {"3.0.3",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [mod_disk_log, httpd_conf, httpd_socket]}]
+ },
+ {"3.0.2",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0.1",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge,
+ [httpd_manager, httpd_misc_sup]},
+ {update, httpd_misc_sup, soft, soft_purge, soft_purge, []},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ }
+ ],
+ [{"3.0.5",
+ [
+ {load_module, ftp, soft_purge, soft_purge, []}
+ ]
+ },
+ {"3.0.4",
+ [{update, httpd_acceptor, soft, soft_purge, soft_purge, []}]
+ },
+ {"3.0.3",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [mod_disk_log, httpd_conf, httpd_socket]}]
+ },
+ {"3.0.2",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0.1",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge,
+ [httpd_manager, httpd_misc_sup]},
+ {update, httpd_misc_sup, soft, soft_purge, soft_purge, []},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ }
+ ]
+}.
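+
+%% Illustrative note (not in the original R9C source): the term above
+%% follows the standard appup shape
+%%   {Vsn, [{UpFromVsn, Instructions}], [{DownToVsn, Instructions}]}
+%% where "%VSN%" is substituted at build time and the instructions are
+%% release-handling terms such as
+%%   {load_module, Mod, PrePurge, PostPurge, DepMods}
+%%   {update, Mod, soft, PrePurge, PostPurge, DepMods}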
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.config b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.config
new file mode 100644
index 0000000000..adf0e3ecf1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets.config
@@ -0,0 +1,2 @@
+[{inets,[{services,[{httpd,"/var/tmp/server_root/conf/8888.conf"},
+ {httpd,"/var/tmp/server_root/conf/8080.conf"}]}]}].
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets_sup.erl
new file mode 100644
index 0000000000..6bda87148c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/inets_sup.erl
@@ -0,0 +1,158 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: inets_sup.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(inets_sup).
+
+-export([crock/0]).
+-export([start/2, stop/1, init/1]).
+-export([start_child/2, stop_child/2, which_children/0]).
+
+
+%% crock (Used for debugging!)
+
+crock() ->
+ application:start(sasl),
+ application:start(inets).
+
+
+%% start
+
+start(Type, State) ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+
+%% stop
+
+stop(State) ->
+ ok.
+
+
+%% start_child
+
+start_child(ConfigFile, Verbosity) ->
+ {ok, Spec} = httpd_child_spec(ConfigFile, Verbosity),
+ supervisor:start_child(?MODULE, Spec).
+
+
+%% stop_child
+
+stop_child(Addr, Port) ->
+ Name = {httpd_sup, Addr, Port},
+ case supervisor:terminate_child(?MODULE, Name) of
+ ok ->
+ supervisor:delete_child(?MODULE, Name);
+ Error ->
+ Error
+ end.
+
+
+%% which_children
+
+which_children() ->
+ supervisor:which_children(?MODULE).
+
+
+%% init
+
+init([]) ->
+ case get_services() of
+ {error, Reason} ->
+ {error,Reason};
+ Services ->
+ SupFlags = {one_for_one, 10, 3600},
+ {ok, {SupFlags, child_spec(Services, [])}}
+ end.
+
+get_services() ->
+ case (catch application:get_env(inets, services)) of
+ {ok, Services} ->
+ Services;
+ _ ->
+ []
+ end.
+
+
+child_spec([], Acc) ->
+ Acc;
+child_spec([{httpd, ConfigFile, Verbosity}|Rest], Acc) ->
+ case httpd_child_spec(ConfigFile, Verbosity) of
+ {ok, Spec} ->
+ child_spec(Rest, [Spec | Acc]);
+ {error, Reason} ->
+ error_msg("Failed creating child spec "
+ "using ~p for reason: ~p", [ConfigFile, Reason]),
+ child_spec(Rest, Acc)
+ end;
+child_spec([{httpd, ConfigFile}|Rest], Acc) ->
+ case httpd_child_spec(ConfigFile, []) of
+ {ok, Spec} ->
+ child_spec(Rest, [Spec | Acc]);
+ {error, Reason} ->
+ error_msg("Failed creating child spec "
+ "using ~p for reason: ~p", [ConfigFile, Reason]),
+ child_spec(Rest, Acc)
+ end.
+
+
+httpd_child_spec(ConfigFile, Verbosity) ->
+ case httpd_conf:load(ConfigFile) of
+ {ok, ConfigList} ->
+ Port = httpd_util:key1search(ConfigList, port, 80),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ {ok, httpd_child_spec(ConfigFile, Addr, Port, Verbosity)};
+ Error ->
+ Error
+ end.
+
+
+httpd_child_spec(ConfigFile, Addr, Port, Verbosity) ->
+ {{httpd_sup, Addr, Port},{httpd_sup, start_link,[ConfigFile, Verbosity]},
+ permanent, 20000, supervisor,
+ [ftp,
+ httpd,
+ httpd_conf,
+ httpd_example,
+ httpd_manager,
+ httpd_misc_sup,
+ httpd_listener,
+ httpd_parse,
+ httpd_request,
+ httpd_response,
+ httpd_socket,
+ httpd_sup,
+ httpd_util,
+ httpd_verbosity,
+ inets_sup,
+ mod_actions,
+ mod_alias,
+ mod_auth,
+ mod_cgi,
+ mod_dir,
+ mod_disk_log,
+ mod_esi,
+ mod_get,
+ mod_head,
+ mod_include,
+ mod_log,
+ mod_auth_mnesia,
+ mod_auth_plain,
+ mod_auth_dets,
+ mod_security]}.
+
+
+error_msg(F, A) ->
+ error_logger:error_msg(F ++ "~n", A).
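+
+%% Illustrative sketch (not in the original R9C source): with the example
+%% inets.config above, every {httpd, ConfigFile} service becomes one
+%% supervisor child; for a configuration file that declares Port 8888 and
+%% no BindAddress, httpd_child_spec/4 returns roughly
+%%
+%%   {{httpd_sup, undefined, 8888},
+%%    {httpd_sup, start_link, ["/var/tmp/server_root/conf/8888.conf", []]},
+%%    permanent, 20000, supervisor, [...]}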
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/jnets_httpd.hrl
new file mode 100644
index 0000000000..721a6b991d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/jnets_httpd.hrl
@@ -0,0 +1,138 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+-include_lib("kernel/include/file.hrl").
+
+-define(SOCKET_CHUNK_SIZE,8192).
+-define(SOCKET_MAX_POLL,25).
+-define(FILE_CHUNK_SIZE,64*1024).
+-define(NICE(Reason),lists:flatten(atom_to_list(?MODULE)++": "++Reason)).
+-define(DEFAULT_CONTEXT,
+ [{errmsg,"[an error occurred while processing this directive]"},
+ {timefmt,"%A, %d-%b-%y %T %Z"},
+ {sizefmt,"abbrev"}]).
+
+
+-ifdef(inets_debug).
+-define(DEBUG(Format, Args), io:format("D(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(DEBUG(F,A),[]).
+-endif.
+
+-define(MAXBODYSIZE,16#ffffffff).
+
+-define(HTTP_VERSION_10,0).
+-define(HTTP_VERSION_11,1).
+
+-define(CR,13).
+-define(LF,10).
+
+
+-record(init_data,{peername,resolve}).
+
+
+-record(mod,{
+ init_data, %
+ data= [], % list() Used to propagate data between modules
+ socket_type=ip_comm, % socket_type() IP or SSL socket
+ socket, % socket() Actual socket
+ config_db, % ets() {key,val} db with config entries
+ method, % atom() HTTP method, e.g. 'GET'
+% request_uri, % string() Request URI
+ path, % string() Absolute path. May include query etc
+ http_version, % int() HTTP minor version number, e.g. 0 or 1
+% request_line, % string() Request Line
+ headers, % #req_headers{} Parsed request headers
+ entity_body= <<>>, % binary() Body of request
+ connection, % boolean() true if persistent connection
+ status_code, % int() Status code
+ logging % int() 0=No logging
+ % 1=Only mod_log present
+ % 2=Only mod_disk_log present
+ % 3=Both mod_log and mod_disk_log present
+ }).
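+
+%% Illustrative sketch (not in the original R9C source): a parsed GET
+%% request could, for example, travel through the modules as
+%%   #mod{method='GET', path="/index.html", http_version=1,
+%%        headers=#req_headers{host="localhost:8080"}, entity_body= <<>>}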
+
+% -record(ssl,{
+% certfile, %
+% keyfile, %
+% verify= 0, %
+% ciphers, %
+% password, %
+% depth = 1, %
+% cacertfile, %
+
+% cachetimeout % Found in yaws....
+% }).
+
+
+-record(http_request,{
+ method, % atom() if known else string() HTTP method
+ path, % {abs_path,string()} URL path
+ version % {int(),int()} {Major,Minor} HTTP version
+ }).
+
+-record(http_response,{
+ version, % {int(),int()} {Major,Minor} HTTP version
+ status, % int() Status code
+ phrase % string() HTTP Reason phrase
+ }).
+
+
+%%% Request headers
+-record(req_headers,{
+%%% --- Standard "General" headers
+% cache_control,
+ connection="keep-alive",
+% date,
+% pragma,
+% trailer,
+ transfer_encoding,
+% upgrade,
+% via,
+% warning,
+%%% --- Standard "Request" headers
+% accept,
+% accept_charset,
+% accept_encoding,
+% accept_language,
+ authorization,
+ expect, %% FIXME! Update inet_drv.c!!
+% from,
+ host,
+ if_match,
+ if_modified_since,
+ if_none_match,
+ if_range,
+ if_unmodified_since,
+% max_forwards,
+% proxy_authorization,
+ range,
+% referer,
+% te, %% FIXME! Update inet_drv.c!!
+ user_agent,
+%%% --- Standard "Entity" headers
+% content_encoding,
+% content_language,
+ content_length="0",
+% content_location,
+% content_md5,
+% content_range,
+ content_type,
+% last_modified,
+ other=[] % (list) Key/Value list with other headers
+ }).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_actions.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_actions.erl
new file mode 100644
index 0000000000..93bdb9fb40
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_actions.erl
@@ -0,0 +1,92 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_actions.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(mod_actions).
+-export([do/1,load/2]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ Path=mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ Suffix=httpd_util:suffix(Path),
+ MimeType=httpd_util:lookup_mime(Info#mod.config_db,Suffix,
+ "text/plain"),
+ Actions=httpd_util:multi_lookup(Info#mod.config_db,action),
+ case action(Info#mod.request_uri,MimeType,Actions) of
+ {yes,RequestURI} ->
+ {proceed,[{new_request_uri,RequestURI}|Info#mod.data]};
+ no ->
+ Scripts=httpd_util:multi_lookup(Info#mod.config_db,script),
+ case script(Info#mod.request_uri,Info#mod.method,Scripts) of
+ {yes,RequestURI} ->
+ {proceed,[{new_request_uri,RequestURI}|Info#mod.data]};
+ no ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+action(RequestURI,MimeType,[]) ->
+ no;
+action(RequestURI,MimeType,[{MimeType,CGIScript}|Rest]) ->
+ {yes,CGIScript++RequestURI};
+action(RequestURI,MimeType,[_|Rest]) ->
+ action(RequestURI,MimeType,Rest).
+
+script(RequestURI,Method,[]) ->
+ no;
+script(RequestURI,Method,[{Method,CGIScript}|Rest]) ->
+ {yes,CGIScript++RequestURI};
+script(RequestURI,Method,[_|Rest]) ->
+ script(RequestURI,Method,Rest).
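+
+%% Illustrative sketch (not in the original R9C source): with a configured
+%% directive "Action text/plain /cgi-bin/log.sh", the lookup above gives
+%%
+%%   action("/docs/note.txt", "text/plain",
+%%          [{"text/plain","/cgi-bin/log.sh"}])
+%%     -> {yes,"/cgi-bin/log.sh/docs/note.txt"}
+%%
+%% and do/1 then proceeds with that value as the new request URI.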
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$A,$c,$t,$i,$o,$n,$ |Action],[]) ->
+ case regexp:split(Action," ") of
+ {ok,[MimeType,CGIScript]} ->
+ {ok,[],{action,{MimeType,CGIScript}}};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Action)++" is an invalid Action")}
+ end;
+load([$S,$c,$r,$i,$p,$t,$ |Script],[]) ->
+ case regexp:split(Script," ") of
+ {ok,[Method,CGIScript]} ->
+ {ok,[],{script,{Method,CGIScript}}};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Script)++" is an invalid Script")}
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_alias.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_alias.erl
new file mode 100644
index 0000000000..e01c18b3d6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_alias.erl
@@ -0,0 +1,175 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_alias.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(mod_alias).
+-export([do/1,real_name/3,real_script_name/3,default_index/2,load/2,path/3]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_alias(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+do_alias(Info) ->
+ ?DEBUG("do_alias -> Request URI: ~p",[Info#mod.request_uri]),
+ {ShortPath,Path,AfterPath} =
+ real_name(Info#mod.config_db,Info#mod.request_uri,
+ httpd_util:multi_lookup(Info#mod.config_db,alias)),
+ %% Relocate if a trailing slash is missing else proceed!
+ LastChar = lists:last(ShortPath),
+ case file:read_file_info(ShortPath) of
+ {ok,FileInfo} when FileInfo#file_info.type == directory,LastChar /= $/ ->
+ ?LOG("do_alias -> ~n"
+ " ShortPath: ~p~n"
+ " LastChar: ~p~n"
+ " FileInfo: ~p",
+ [ShortPath,LastChar,FileInfo]),
+ ServerName = httpd_util:lookup(Info#mod.config_db,server_name),
+ Port = port_string(httpd_util:lookup(Info#mod.config_db,port,80)),
+ URL = "http://"++ServerName++Port++Info#mod.request_uri++"/",
+ ReasonPhrase = httpd_util:reason_phrase(301),
+ Message = httpd_util:message(301,URL,Info#mod.config_db),
+ {proceed,
+ [{response,
+ {301, ["Location: ", URL, "\r\n"
+ "Content-Type: text/html\r\n",
+ "\r\n",
+ "<HTML>\n<HEAD>\n<TITLE>",ReasonPhrase,
+ "</TITLE>\n</HEAD>\n"
+ "<BODY>\n<H1>",ReasonPhrase,
+ "</H1>\n", Message,
+ "\n</BODY>\n</HTML>\n"]}}|
+ [{real_name,{Path,AfterPath}}|Info#mod.data]]};
+ NoFile ->
+ {proceed,[{real_name,{Path,AfterPath}}|Info#mod.data]}
+ end.
+
+port_string(80) ->
+ "";
+port_string(Port) ->
+ ":"++integer_to_list(Port).
+
+%% real_name
+
+real_name(ConfigDB, RequestURI,[]) ->
+ DocumentRoot = httpd_util:lookup(ConfigDB, document_root, ""),
+ RealName = DocumentRoot++RequestURI,
+ {ShortPath, _AfterPath} = httpd_util:split_path(RealName),
+ {Path, AfterPath}=httpd_util:split_path(default_index(ConfigDB,RealName)),
+ {ShortPath, Path, AfterPath};
+real_name(ConfigDB, RequestURI, [{FakeName,RealName}|Rest]) ->
+ case regexp:match(RequestURI, "^"++FakeName) of
+ {match, _, _} ->
+ {ok, ActualName, _} = regexp:sub(RequestURI,
+ "^"++FakeName, RealName),
+ {ShortPath, _AfterPath} = httpd_util:split_path(ActualName),
+ {Path, AfterPath} =
+ httpd_util:split_path(default_index(ConfigDB, ActualName)),
+ {ShortPath, Path, AfterPath};
+ nomatch ->
+ real_name(ConfigDB,RequestURI,Rest)
+ end.
+
+%% real_script_name
+
+real_script_name(ConfigDB,RequestURI,[]) ->
+ not_a_script;
+real_script_name(ConfigDB,RequestURI,[{FakeName,RealName}|Rest]) ->
+ case regexp:match(RequestURI,"^"++FakeName) of
+ {match,_,_} ->
+ {ok,ActualName,_}=regexp:sub(RequestURI,"^"++FakeName,RealName),
+ httpd_util:split_script_path(default_index(ConfigDB,ActualName));
+ nomatch ->
+ real_script_name(ConfigDB,RequestURI,Rest)
+ end.
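+
+%% Illustrative sketch (not in the original R9C source): with
+%% Aliases = [{"/image/","/usr/local/images/"}], a request URI such as
+%% "/image/photo.gif" is rewritten by real_name/3 to
+%% "/usr/local/images/photo.gif"; for directories, default_index/2 below
+%% appends the configured DirectoryIndex file.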
+
+%% default_index
+
+default_index(ConfigDB, Path) ->
+ case file:read_file_info(Path) of
+ {ok, FileInfo} when FileInfo#file_info.type == directory ->
+ DirectoryIndex = httpd_util:lookup(ConfigDB, directory_index, []),
+ append_index(Path, DirectoryIndex);
+ _ ->
+ Path
+ end.
+
+append_index(RealName, []) ->
+ RealName;
+append_index(RealName, [Index|Rest]) ->
+ case file:read_file_info(filename:join(RealName, Index)) of
+ {error,Reason} ->
+ append_index(RealName, Rest);
+ _ ->
+ filename:join(RealName,Index)
+ end.
+
+%% path
+
+path(Data, ConfigDB, RequestURI) ->
+ case httpd_util:key1search(Data,real_name) of
+ undefined ->
+ DocumentRoot = httpd_util:lookup(ConfigDB, document_root, ""),
+ {Path,AfterPath} =
+ httpd_util:split_path(DocumentRoot++RequestURI),
+ Path;
+ {Path,AfterPath} ->
+ Path
+ end.
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$D,$i,$r,$e,$c,$t,$o,$r,$y,$I,$n,$d,$e,$x,$ |DirectoryIndex],[]) ->
+ {ok, DirectoryIndexes} = regexp:split(DirectoryIndex," "),
+ {ok,[], {directory_index, DirectoryIndexes}};
+load([$A,$l,$i,$a,$s,$ |Alias],[]) ->
+ case regexp:split(Alias," ") of
+ {ok, [FakeName, RealName]} ->
+ {ok,[],{alias,{FakeName,RealName}}};
+ {ok, _} ->
+ {error,?NICE(httpd_conf:clean(Alias)++" is an invalid Alias")}
+ end;
+load([$S,$c,$r,$i,$p,$t,$A,$l,$i,$a,$s,$ |ScriptAlias],[]) ->
+ case regexp:split(ScriptAlias," ") of
+ {ok, [FakeName, RealName]} ->
+ %% Make sure the path always has a trailing slash..
+ RealName1 = filename:join(filename:split(RealName)),
+ {ok, [], {script_alias,{FakeName, RealName1++"/"}}};
+ {ok, _} ->
+ {error, ?NICE(httpd_conf:clean(ScriptAlias)++
+ " is an invalid ScriptAlias")}
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.erl
new file mode 100644
index 0000000000..dadb64e3c1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.erl
@@ -0,0 +1,750 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(mod_auth).
+
+
+%% The functions that the web server calls on startup and stop,
+%% and when the server traverses the modules.
+-export([do/1, load/2, store/2, remove/1]).
+
+%% User entries to the gen-server.
+-export([add_user/2, add_user/5, add_user/6,
+ add_group_member/3, add_group_member/4, add_group_member/5,
+ list_users/1, list_users/2, list_users/3,
+ delete_user/2, delete_user/3, delete_user/4,
+ delete_group_member/3, delete_group_member/4, delete_group_member/5,
+ list_groups/1, list_groups/2, list_groups/3,
+ delete_group/2, delete_group/3, delete_group/4,
+ get_user/2, get_user/3, get_user/4,
+ list_group_members/2, list_group_members/3, list_group_members/4,
+ update_password/6, update_password/5]).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+-define(VMODULE,"AUTH").
+-include("httpd_verbosity.hrl").
+
+-define(NOPASSWORD,"NoPassword").
+
+
+%% do
+do(Info) ->
+ ?vtrace("do", []),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed, Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ %% Is it a secret area?
+ case secretp(Path,Info#mod.config_db) of
+ {yes, Directory, DirectoryData} ->
+ %% Authenticate (allow)
+ case allow((Info#mod.init_data)#init_data.peername,
+ Info#mod.socket_type,Info#mod.socket,
+ DirectoryData) of
+ allowed ->
+ case deny((Info#mod.init_data)#init_data.peername,
+ Info#mod.socket_type, Info#mod.socket,
+ DirectoryData) of
+ not_denied ->
+ case httpd_util:key1search(DirectoryData,
+ auth_type) of
+ undefined ->
+ {proceed, Info#mod.data};
+ none ->
+ {proceed, Info#mod.data};
+ AuthType ->
+ do_auth(Info,
+ Directory,
+ DirectoryData,
+ AuthType)
+ end;
+ {denied, Reason} ->
+ {proceed,
+ [{status,{403,Info#mod.request_uri,Reason}}|
+ Info#mod.data]}
+ end;
+ {not_allowed, Reason} ->
+ {proceed,[{status,{403,Info#mod.request_uri,Reason}}|
+ Info#mod.data]}
+ end;
+ no ->
+ {proceed, Info#mod.data}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed, Info#mod.data}
+ end
+ end.
+
+
+do_auth(Info, Directory, DirectoryData, AuthType) ->
+ %% Authenticate (require)
+ case require(Info, Directory, DirectoryData) of
+ authorized ->
+ {proceed,Info#mod.data};
+ {authorized, User} ->
+ {proceed, [{remote_user,User}|Info#mod.data]};
+ {authorization_failed, Reason} ->
+ ?vtrace("do_auth -> authorization_failed: ~p",[Reason]),
+ {proceed, [{status,{401,none,Reason}}|Info#mod.data]};
+ {authorization_required, Realm} ->
+ ?vtrace("do_auth -> authorization_required: ~p",[Realm]),
+ ReasonPhrase = httpd_util:reason_phrase(401),
+ Message = httpd_util:message(401,none,Info#mod.config_db),
+ {proceed,
+ [{response,
+ {401,
+ ["WWW-Authenticate: Basic realm=\"",Realm,
+ "\"\r\n\r\n","<HTML>\n<HEAD>\n<TITLE>",
+ ReasonPhrase,"</TITLE>\n",
+ "</HEAD>\n<BODY>\n<H1>",ReasonPhrase,
+ "</H1>\n",Message,"\n</BODY>\n</HTML>\n"]}}|
+ Info#mod.data]};
+ {status, {StatusCode,PhraseArgs,Reason}} ->
+ {proceed, [{status,{StatusCode,PhraseArgs,Reason}}|
+ Info#mod.data]}
+ end.
+
+
+%% require
+
+require(Info, Directory, DirectoryData) ->
+ ParsedHeader = Info#mod.parsed_header,
+ ValidUsers = httpd_util:key1search(DirectoryData, require_user),
+ ValidGroups = httpd_util:key1search(DirectoryData, require_group),
+
+ %% Any user or group restrictions?
+ case ValidGroups of
+ undefined when ValidUsers == undefined ->
+ authorized;
+ _ ->
+ case httpd_util:key1search(ParsedHeader, "authorization") of
+ %% Authorization required!
+ undefined ->
+ case httpd_util:key1search(DirectoryData, auth_name) of
+ undefined ->
+ {status,{500,none,?NICE("AuthName directive not specified")}};
+ Realm ->
+ {authorization_required, Realm}
+ end;
+ %% Check credentials!
+ [$B,$a,$s,$i,$c,$ | EncodedString] ->
+ DecodedString = httpd_util:decode_base64(EncodedString),
+ case a_valid_user(Info, DecodedString,
+ ValidUsers, ValidGroups,
+ Directory, DirectoryData) of
+ {yes, User} ->
+ {authorized, User};
+ {no, Reason} ->
+ {authorization_failed, Reason};
+ {status, {StatusCode,PhraseArgs,Reason}} ->
+ {status,{StatusCode,PhraseArgs,Reason}}
+ end;
+ %% Bad credentials!
+ BadCredentials ->
+ {status,{401,none,?NICE("Bad credentials "++BadCredentials)}}
+ end
+ end.
+
+a_valid_user(Info,DecodedString,ValidUsers,ValidGroups,Dir,DirData) ->
+ case httpd_util:split(DecodedString,":",2) of
+ {ok,[SupposedUser, Password]} ->
+ case user_accepted(SupposedUser, ValidUsers) of
+ true ->
+ check_password(SupposedUser, Password, Dir, DirData);
+ false ->
+ case group_accepted(Info,SupposedUser,ValidGroups,Dir,DirData) of
+ true ->
+ check_password(SupposedUser,Password,Dir,DirData);
+ false ->
+ {no,?NICE("No such user exists")}
+ end
+ end;
+ {ok,BadCredentials} ->
+ {status,{401,none,?NICE("Bad credentials "++BadCredentials)}}
+ end.
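+
+%% Illustrative sketch (not in the original R9C source): a request that
+%% carries the header "Authorization: Basic am9lOnNlY3JldA==" reaches the
+%% [$B,$a,$s,$i,$c,$ |...] clause above; the credentials decode to
+%% "joe:secret", which a_valid_user/6 splits into ["joe","secret"] before
+%% the user/group and password checks.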
+
+user_accepted(SupposedUser, undefined) ->
+ false;
+user_accepted(SupposedUser, ValidUsers) ->
+ lists:member(SupposedUser, ValidUsers).
+
+
+group_accepted(Info, User, undefined, Dir, DirData) ->
+ false;
+group_accepted(Info, User, [], Dir, DirData) ->
+ false;
+group_accepted(Info, User, [Group|Rest], Dir, DirData) ->
+ Ret = int_list_group_members(Group, Dir, DirData),
+ case Ret of
+ {ok, UserList} ->
+ case lists:member(User, UserList) of
+ true ->
+ true;
+ false ->
+ group_accepted(Info, User, Rest, Dir, DirData)
+ end;
+ Other ->
+ false
+ end.
+
+check_password(User, Password, Dir, DirData) ->
+ case int_get_user(DirData, User) of
+ {ok, UStruct} ->
+ case UStruct#httpd_user.password of
+ Password ->
+ %% FIXME
+ {yes, UStruct#httpd_user.username};
+ Other ->
+ {no, "No such user"} % Don't say 'Bad Password' !!!
+ end;
+ _ ->
+ {no, "No such user"}
+ end.
+
+
+%% Middle API. These functions call the appropriate authentication module.
+int_get_user(DirData, User) ->
+ AuthMod = auth_mod_name(DirData),
+ apply(AuthMod, get_user, [DirData, User]).
+
+int_list_group_members(Group, Dir, DirData) ->
+ AuthMod = auth_mod_name(DirData),
+ apply(AuthMod, list_group_members, [DirData, Group]).
+
+auth_mod_name(DirData) ->
+ case httpd_util:key1search(DirData, auth_type, plain) of
+ plain -> mod_auth_plain;
+ mnesia -> mod_auth_mnesia;
+ dets -> mod_auth_dets
+ end.
+
+
+%%
+%% Is it a secret area?
+%%
+
+%% secretp
+
+secretp(Path,ConfigDB) ->
+ Directories = ets:match(ConfigDB,{directory,'$1','_'}),
+ case secret_path(Path, Directories) of
+ {yes,Directory} ->
+ {yes,Directory,
+ lists:flatten(ets:match(ConfigDB,{directory,Directory,'$1'}))};
+ no ->
+ no
+ end.
+
+secret_path(Path,Directories) ->
+ secret_path(Path, httpd_util:uniq(lists:sort(Directories)),to_be_found).
+
+secret_path(Path,[],to_be_found) ->
+ no;
+secret_path(Path,[],Directory) ->
+ {yes,Directory};
+secret_path(Path,[[NewDirectory]|Rest],Directory) ->
+ case regexp:match(Path,NewDirectory) of
+ {match,_,_} when Directory == to_be_found ->
+ secret_path(Path,Rest,NewDirectory);
+ {match,_,Length} when Length > length(Directory)->
+ secret_path(Path,Rest,NewDirectory);
+ {match,_,Length} ->
+ secret_path(Path,Rest,Directory);
+ nomatch ->
+ secret_path(Path,Rest,Directory)
+ end.
+
+%%
+%% Authenticate
+%%
+
+%% allow
+
+allow({_,RemoteAddr},SocketType,Socket,DirectoryData) ->
+ Hosts = httpd_util:key1search(DirectoryData, allow_from, all),
+ case validate_addr(RemoteAddr,Hosts) of
+ true ->
+ allowed;
+ false ->
+ {not_allowed, ?NICE("Connection from your host is not allowed")}
+ end.
+
+validate_addr(RemoteAddr,all) -> % When called from 'allow'
+ true;
+validate_addr(RemoteAddr,none) -> % When called from 'deny'
+ false;
+validate_addr(RemoteAddr,[]) ->
+ false;
+validate_addr(RemoteAddr,[HostRegExp|Rest]) ->
+ ?DEBUG("validate_addr -> RemoteAddr: ~p HostRegExp: ~p",
+ [RemoteAddr, HostRegExp]),
+ case regexp:match(RemoteAddr, HostRegExp) of
+ {match,_,_} ->
+ true;
+ nomatch ->
+ validate_addr(RemoteAddr,Rest)
+ end.
+
+%% deny
+
+deny({_,RemoteAddr},SocketType,Socket,DirectoryData) ->
+ ?DEBUG("deny -> RemoteAddr: ~p",[RemoteAddr]),
+ Hosts = httpd_util:key1search(DirectoryData, deny_from, none),
+ ?DEBUG("deny -> Hosts: ~p",[Hosts]),
+ case validate_addr(RemoteAddr,Hosts) of
+ true ->
+ {denied, ?NICE("Connection from your host is not allowed")};
+ false ->
+ not_denied
+ end.
+
+%%
+%% Configuration
+%%
+
+%% load/2
+%%
+
+%% mod_auth recognizes the following Configuration Directives:
+%% <Directory /path/to/directory>
+%% AuthDBType
+%% AuthName
+%% AuthUserFile
+%% AuthGroupFile
+%% AuthAccessPassword
+%% require
+%% allow
+%% </Directory>
+
+%% When a <Directory> directive is found, a new context is set to
+%% [{directory, Directory, DirData}|OtherContext]
+%% DirData in this case is a key-value list of data belonging to the
+%% directory in question.
+%%
+%% When the </Directory> statement is found, the Context created earlier
+%% will be returned as a ConfigList and the context will return to the
+%% state it was previously.
+
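+%% Illustrative sketch (not in the original R9C source): a fragment such as
+%%
+%%   <Directory /var/tmp/server_root/htdocs/secret>
+%%   AuthDBType plain
+%%   AuthName Secret Area
+%%   AuthUserFile /var/tmp/server_root/auth/passwd
+%%   require user joe
+%%   </Directory>
+%%
+%% accumulates a context entry of the form
+%%
+%%   {directory, "/var/tmp/server_root/htdocs/secret",
+%%    [{require_user,["joe"]},
+%%     {auth_user_file,"/var/tmp/server_root/auth/passwd"},
+%%     {auth_name,"Secret Area"},
+%%     {auth_type,plain},
+%%     {path,"/var/tmp/server_root/htdocs/secret"}]}
+%%
+%% which is returned when the </Directory> clause below is reached.
+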
+load([$<,$D,$i,$r,$e,$c,$t,$o,$r,$y,$ |Directory],[]) ->
+ Dir = httpd_conf:custom_clean(Directory,"",">"),
+ {ok,[{directory, Dir, [{path, Dir}]}]};
+load(eof,[{directory,Directory, DirData}|_]) ->
+ {error, ?NICE("Premature end-of-file in "++Directory)};
+
+load([$A,$u,$t,$h,$N,$a,$m,$e,$ |AuthName], [{directory,Directory, DirData}|Rest]) ->
+ {ok, [{directory,Directory,
+ [ {auth_name, httpd_conf:clean(AuthName)}|DirData]} | Rest ]};
+
+load([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e,$ |AuthUserFile0],
+ [{directory, Directory, DirData}|Rest]) ->
+ AuthUserFile = httpd_conf:clean(AuthUserFile0),
+ {ok,[{directory,Directory,
+ [ {auth_user_file, AuthUserFile}|DirData]} | Rest ]};
+
+load([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e,$ |AuthGroupFile0],
+ [{directory,Directory, DirData}|Rest]) ->
+ AuthGroupFile = httpd_conf:clean(AuthGroupFile0),
+ {ok,[{directory,Directory,
+ [ {auth_group_file, AuthGroupFile}|DirData]} | Rest]};
+
+%AuthAccessPassword
+load([$A,$u,$t,$h,$A,$c,$c,$e,$s,$s,$P,$a,$s,$s,$w,$o,$r,$d,$ |AuthAccessPassword0],
+ [{directory,Directory, DirData}|Rest]) ->
+ AuthAccessPassword = httpd_conf:clean(AuthAccessPassword0),
+ {ok,[{directory,Directory,
+ [{auth_access_password, AuthAccessPassword}|DirData]} | Rest]};
+
+
+
+
+load([$A,$u,$t,$h,$D,$B,$T,$y,$p,$e,$ |Type],
+ [{directory, Dir, DirData}|Rest]) ->
+ case httpd_conf:clean(Type) of
+ "plain" ->
+ {ok, [{directory, Dir, [{auth_type, plain}|DirData]} | Rest ]};
+ "mnesia" ->
+ {ok, [{directory, Dir, [{auth_type, mnesia}|DirData]} | Rest ]};
+ "dets" ->
+ {ok, [{directory, Dir, [{auth_type, dets}|DirData]} | Rest ]};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(Type)++" is an invalid AuthDBType")}
+ end;
+
+load([$r,$e,$q,$u,$i,$r,$e,$ |Require],[{directory,Directory, DirData}|Rest]) ->
+ case regexp:split(Require," ") of
+ {ok,["user"|Users]} ->
+ {ok,[{directory,Directory,
+ [{require_user,Users}|DirData]} | Rest]};
+ {ok,["group"|Groups]} ->
+ {ok,[{directory,Directory,
+ [{require_group,Groups}|DirData]} | Rest]};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Require)++" is an invalid require")}
+ end;
+
+load([$a,$l,$l,$o,$w,$ |Allow],[{directory,Directory, DirData}|Rest]) ->
+ case regexp:split(Allow," ") of
+ {ok,["from","all"]} ->
+ {ok,[{directory,Directory,
+ [{allow_from,all}|DirData]} | Rest]};
+ {ok,["from"|Hosts]} ->
+ {ok,[{directory,Directory,
+ [{allow_from,Hosts}|DirData]} | Rest]};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Allow)++" is an invalid allow")}
+ end;
+
+load([$d,$e,$n,$y,$ |Deny],[{directory,Directory, DirData}|Rest]) ->
+ case regexp:split(Deny," ") of
+ {ok, ["from", "all"]} ->
+ {ok,[{directory, Directory,
+ [{deny_from, all}|DirData]} | Rest]};
+ {ok, ["from"|Hosts]} ->
+ {ok,[{directory, Directory,
+ [{deny_from, Hosts}|DirData]} | Rest]};
+ {ok, _} ->
+ {error,?NICE(httpd_conf:clean(Deny)++" is an invalid deny")}
+ end;
+
+load("</Directory>",[{directory,Directory, DirData}|Rest]) ->
+ {ok, Rest, {directory, Directory, DirData}};
+
+load([$A,$u,$t,$h,$M,$n,$e,$s,$i,$a,$D,$B,$ |AuthMnesiaDB],
+ [{directory, Dir, DirData}|Rest]) ->
+ case httpd_conf:clean(AuthMnesiaDB) of
+ "On" ->
+ {ok,[{directory,Dir,[{auth_type,mnesia}|DirData]}|Rest]};
+ "Off" ->
+ {ok,[{directory,Dir,[{auth_type,plain}|DirData]}|Rest]};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(AuthMnesiaDB)++" is an invalid AuthMnesiaDB")}
+ end.
+
+%% store
+
+store({directory,Directory0, DirData0}, ConfigList) ->
+ Port = httpd_util:key1search(ConfigList, port),
+ DirData = case httpd_util:key1search(ConfigList, bind_address) of
+ undefined ->
+ [{port, Port}|DirData0];
+ Addr ->
+ [{port, Port},{bind_address,Addr}|DirData0]
+ end,
+ Directory =
+ case filename:pathtype(Directory0) of
+ relative ->
+ SR = httpd_util:key1search(ConfigList, server_root),
+ filename:join(SR, Directory0);
+ _ ->
+ Directory0
+ end,
+ AuthMod =
+ case httpd_util:key1search(DirData0, auth_type) of
+ mnesia -> mod_auth_mnesia;
+ dets -> mod_auth_dets;
+ plain -> mod_auth_plain;
+ _ -> no_module_at_all
+ end,
+ case AuthMod of
+ no_module_at_all ->
+ {ok, {directory, Directory, DirData}};
+ _ ->
+ %% Check that a password is present, or add the standard password
+ %% "NoPassword"; this way a user must explicitly choose to run
+ %% without a password.
+ Pwd = case httpd_util:key1search(DirData,auth_access_password)of
+ undefined->
+ ?NOPASSWORD;
+ PassW->
+ PassW
+ end,
+ DirDataLast = lists:keydelete(auth_access_password,1,DirData),
+ case catch AuthMod:store_directory_data(Directory, DirDataLast) of
+ ok ->
+ add_auth_password(Directory,Pwd,ConfigList),
+ {ok, {directory, Directory, DirDataLast}};
+ {ok, NewDirData} ->
+ add_auth_password(Directory,Pwd,ConfigList),
+ {ok, {directory, Directory, NewDirData}};
+ {error, Reason} ->
+ {error, Reason};
+ Other ->
+ ?ERROR("unexpected result: ~p",[Other]),
+ {error, Other}
+ end
+ end.
+
+
+add_auth_password(Dir, Pwd0, ConfigList) ->
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ Port = httpd_util:key1search(ConfigList, port),
+ mod_auth_server:start(Addr, Port),
+ mod_auth_server:add_password(Addr, Port, Dir, Pwd0).
+
+%% remove
+
+
+remove(ConfigDB) ->
+ lists:foreach(fun({directory, Dir, DirData}) ->
+ AuthMod = auth_mod_name(DirData),
+ (catch apply(AuthMod, remove, [DirData]))
+ end,
+ ets:match_object(ConfigDB,{directory,'_','_'})),
+ Addr = case lookup(ConfigDB, bind_address) of
+ [] ->
+ undefined;
+ [{bind_address, Address}] ->
+ Address
+ end,
+ [{port, Port}] = lookup(ConfigDB, port),
+ mod_auth_server:stop(Addr, Port),
+ ok.
+
+
+
+
+%% --------------------------------------------------------------------
+
+%% update_password
+
+update_password(Port, Dir, Old, New, New)->
+ update_password(undefined, Port, Dir, Old, New, New).
+
+update_password(Addr, Port, Dir, Old, New, New) when list(New) ->
+ mod_auth_server:update_password(Addr, Port, Dir, Old, New);
+
+update_password(_Addr, _Port, _Dir, _Old, New, New) ->
+ {error, badtype};
+update_password(_Addr, _Port, _Dir, _Old, New, New1) ->
+ {error, notequal}.
+
+
+%% add_user
+
+add_user(UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ case get_options(Opt, userData) of
+ {error, Reason}->
+ {error, Reason};
+ {UserData, Password}->
+ User = [#httpd_user{username = UserName,
+ password = Password,
+ user_data = UserData}],
+ mod_auth_server:add_user(Addr, Port, Dir, User, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end.
+
+
+add_user(UserName, Password, UserData, Port, Dir) ->
+ add_user(UserName, Password, UserData, undefined, Port, Dir).
+add_user(UserName, Password, UserData, Addr, Port, Dir) ->
+ User = [#httpd_user{username = UserName,
+ password = Password,
+ user_data = UserData}],
+ mod_auth_server:add_user(Addr, Port, Dir, User, ?NOPASSWORD).
+
+
+%% get_user
+
+get_user(UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:get_user(Addr, Port, Dir, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+get_user(UserName, Port, Dir) ->
+ get_user(UserName, undefined, Port, Dir).
+get_user(UserName, Addr, Port, Dir) ->
+ mod_auth_server:get_user(Addr, Port, Dir, UserName, ?NOPASSWORD).
+
+
+%% add_group_member
+
+add_group_member(GroupName, UserName, Opt)->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ mod_auth_server:add_group_member(Addr, Port, Dir,
+ GroupName, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+add_group_member(GroupName, UserName, Port, Dir) ->
+ add_group_member(GroupName, UserName, undefined, Port, Dir).
+
+add_group_member(GroupName, UserName, Addr, Port, Dir) ->
+ mod_auth_server:add_group_member(Addr, Port, Dir,
+ GroupName, UserName, ?NOPASSWORD).
+
+
+%% delete_group_member
+
+delete_group_member(GroupName, UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:delete_group_member(Addr, Port, Dir,
+ GroupName, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+delete_group_member(GroupName, UserName, Port, Dir) ->
+ delete_group_member(GroupName, UserName, undefined, Port, Dir).
+delete_group_member(GroupName, UserName, Addr, Port, Dir) ->
+ mod_auth_server:delete_group_member(Addr, Port, Dir,
+ GroupName, UserName, ?NOPASSWORD).
+
+
+%% list_users
+
+list_users(Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:list_users(Addr, Port, Dir, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+list_users(Port, Dir) ->
+ list_users(undefined, Port, Dir).
+list_users(Addr, Port, Dir) ->
+ mod_auth_server:list_users(Addr, Port, Dir, ?NOPASSWORD).
+
+
+%% delete_user
+
+delete_user(UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:delete_user(Addr, Port, Dir, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+delete_user(UserName, Port, Dir) ->
+ delete_user(UserName, undefined, Port, Dir).
+delete_user(UserName, Addr, Port, Dir) ->
+ mod_auth_server:delete_user(Addr, Port, Dir, UserName, ?NOPASSWORD).
+
+
+%% delete_group
+
+delete_group(GroupName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ mod_auth_server:delete_group(Addr, Port, Dir, GroupName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+delete_group(GroupName, Port, Dir) ->
+ delete_group(GroupName, undefined, Port, Dir).
+delete_group(GroupName, Addr, Port, Dir) ->
+ mod_auth_server:delete_group(Addr, Port, Dir, GroupName, ?NOPASSWORD).
+
+
+%% list_groups
+
+list_groups(Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ mod_auth_server:list_groups(Addr, Port, Dir, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+list_groups(Port, Dir) ->
+ list_groups(undefined, Port, Dir).
+list_groups(Addr, Port, Dir) ->
+ mod_auth_server:list_groups(Addr, Port, Dir, ?NOPASSWORD).
+
+
+%% list_group_members
+
+list_group_members(GroupName,Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:list_group_members(Addr, Port, Dir, GroupName,
+ AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+list_group_members(GroupName, Port, Dir) ->
+ list_group_members(GroupName, undefined, Port, Dir).
+list_group_members(GroupName, Addr, Port, Dir) ->
+ mod_auth_server:list_group_members(Addr, Port, Dir, GroupName, ?NOPASSWORD).
+
+
+
+%% Opt = [{port, Port},
+%% {addr, Addr},
+%% {dir, Dir},
+%% {authPassword, AuthPassword} | FunctionSpecificData]
+get_options(Opt, mandatory)->
+ case httpd_util:key1search(Opt, port, undefined) of
+ Port when integer(Port) ->
+ case httpd_util:key1search(Opt, dir, undefined) of
+ Dir when list(Dir) ->
+ Addr = httpd_util:key1search(Opt,
+ addr,
+ undefined),
+ AuthPwd = httpd_util:key1search(Opt,
+ authPassword,
+ ?NOPASSWORD),
+ {Addr, Port, Dir, AuthPwd};
+ _->
+ {error, bad_dir}
+ end;
+ _ ->
+ {error, bad_dir}
+ end;
+
+%% FunctionSpecificData = {userData, UserData} | {password, Password}
+get_options(Opt, userData)->
+ case httpd_util:key1search(Opt, userData, undefined) of
+ undefined ->
+ {error, no_userdata};
+ UserData ->
+ case httpd_util:key1search(Opt, password, undefined) of
+ undefined->
+ {error, no_password};
+ Pwd ->
+ {UserData, Pwd}
+ end
+ end.
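+
+%% Illustrative sketch (not in the original R9C source): for example
+%%
+%%   get_options([{port,8888},
+%%                {dir,"/var/tmp/server_root/htdocs/secret"},
+%%                {authPassword,"secret"}], mandatory)
+%%     -> {undefined, 8888, "/var/tmp/server_root/htdocs/secret", "secret"}
+%%
+%% i.e. a missing addr defaults to undefined and a missing authPassword
+%% defaults to "NoPassword".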
+
+
+lookup(Db, Key) ->
+ ets:lookup(Db, Key).
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.hrl
new file mode 100644
index 0000000000..ed3f437e60
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth.hrl
@@ -0,0 +1,27 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth.hrl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+
+-record(httpd_user,
+ {username,
+ password,
+ user_data}).
+
+-record(httpd_group,
+ {name,
+ userlist}).
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_dets.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_dets.erl
new file mode 100644
index 0000000000..89d8574e83
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_dets.erl
@@ -0,0 +1,222 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_dets.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_auth_dets).
+
+%% dets authentication storage
+
+-export([get_user/2,
+ list_group_members/2,
+ add_user/2,
+ add_group_member/3,
+ list_users/1,
+ delete_user/2,
+ list_groups/1,
+ delete_group_member/3,
+ delete_group/2,
+ remove/1]).
+
+-export([store_directory_data/2]).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+store_directory_data(Directory, DirData) ->
+ ?CDEBUG("store_directory_data -> ~n"
+ " Directory: ~p~n"
+ " DirData: ~p",
+ [Directory, DirData]),
+
+ PWFile = httpd_util:key1search(DirData, auth_user_file),
+ GroupFile = httpd_util:key1search(DirData, auth_group_file),
+ Addr = httpd_util:key1search(DirData, bind_address),
+ Port = httpd_util:key1search(DirData, port),
+
+ PWName = httpd_util:make_name("httpd_dets_pwdb",Addr,Port),
+ case dets:open_file(PWName,[{type,set},{file,PWFile},{repair,true}]) of
+ {ok, PWDB} ->
+ GDBName = httpd_util:make_name("httpd_dets_groupdb",Addr,Port),
+ case dets:open_file(GDBName,[{type,set},{file,GroupFile},{repair,true}]) of
+ {ok, GDB} ->
+ NDD1 = lists:keyreplace(auth_user_file, 1, DirData,
+ {auth_user_file, PWDB}),
+ NDD2 = lists:keyreplace(auth_group_file, 1, NDD1,
+ {auth_group_file, GDB}),
+ {ok, NDD2};
+ {error, Err}->
+ {error, {{file, GroupFile},Err}}
+ end;
+ {error, Err2} ->
+ {error, {{file, PWFile},Err2}}
+ end.
+
+%%
+%% Storage format of users in the dets table:
+%% {{UserName, Addr, Port, Dir}, Password, UserData}
+%%
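+%% For illustration (editor's addition, hypothetical values): an entry for a
+%% user "joe" of the directory "/secret" served on {127,0,0,1}:8080 would be
+%%
+%%   {{"joe", {127,0,0,1}, 8080, "/secret"}, "s3cr3t", []}
+%%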
+
+add_user(DirData, UStruct) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ Record = {{UStruct#httpd_user.username, Addr, Port, Dir},
+ UStruct#httpd_user.password, UStruct#httpd_user.user_data},
+ case dets:lookup(PWDB, UStruct#httpd_user.username) of
+ [Record] ->
+ {error, user_already_in_db};
+ _ ->
+ dets:insert(PWDB, Record),
+ true
+ end.
+
+get_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ User = {UserName, Addr, Port, Dir},
+ case dets:lookup(PWDB, User) of
+ [{User, Password, UserData}] ->
+ {ok, #httpd_user{username=UserName, password=Password, user_data=UserData}};
+ Other ->
+ {error, no_such_user}
+ end.
+
+list_users(DirData) ->
+ ?DEBUG("list_users -> ~n"
+ " DirData: ~p", [DirData]),
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case dets:traverse(PWDB, fun(X) -> {continue, X} end) of %% SOOOO Ugly !
+ Records when list(Records) ->
+ ?DEBUG("list_users -> ~n"
+ " Records: ~p", [Records]),
+ {ok, [UserName || {{UserName, AnyAddr, AnyPort, AnyDir}, Password, _Data} <- Records,
+ AnyAddr == Addr, AnyPort == Port, AnyDir == Dir]};
+ O ->
+ ?DEBUG("list_users -> ~n"
+ " O: ~p", [O]),
+ {ok, []}
+ end.
+
+delete_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ User = {UserName, Addr, Port, Dir},
+ case dets:lookup(PWDB, User) of
+ [{User, SomePassword, UserData}] ->
+ dets:delete(PWDB, User),
+ lists:foreach(fun(Group) -> delete_group_member(DirData, Group, UserName) end,
+ list_groups(DirData)),
+ true;
+ _ ->
+ {error, no_such_user}
+ end.
+
+%%
+%% Storage of groups in the dets table:
+%% {Group, UserList} where UserList is a list of strings.
+%%
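+%% For illustration (editor's addition, hypothetical values): a group "admins"
+%% for the same directory would be stored as
+%%
+%%   {{"admins", {127,0,0,1}, 8080, "/secret"}, ["joe", "anna"]}
+%%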
+add_group_member(DirData, GroupName, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ case lists:member(UserName, Users) of
+ true ->
+ true;
+ false ->
+ dets:insert(GDB, {Group, [UserName|Users]}),
+ true
+ end;
+ [] ->
+ dets:insert(GDB, {Group, [UserName]}),
+ true;
+ Other ->
+ {error, Other}
+ end.
+
+list_group_members(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ {ok, Users};
+ Other ->
+ {error, no_such_group}
+ end.
+
+list_groups(DirData) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case dets:match(GDB, {'$1', '_'}) of
+ [] ->
+ {ok, []};
+ List when list(List) ->
+ Groups = lists:flatten(List),
+ {ok, [GroupName || {GroupName, AnyAddr, AnyPort, AnyDir} <- Groups,
+ AnyAddr == Addr, AnyPort == Port, AnyDir == Dir]};
+ _ ->
+ {ok, []}
+ end.
+
+delete_group_member(DirData, GroupName, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, GroupName) of
+ [{Group, Users}] ->
+ case lists:member(UserName, Users) of
+ true ->
+ dets:delete(GDB, Group),
+ dets:insert(GDB, {Group,
+ lists:delete(UserName, Users)}),
+ true;
+ false ->
+ {error, no_such_group_member}
+ end;
+ _ ->
+ {error, no_such_group}
+ end.
+
+delete_group(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ dets:delete(GDB, Group),
+ true;
+ _ ->
+ {error, no_such_group}
+ end.
+
+lookup_common(DirData) ->
+ Dir = httpd_util:key1search(DirData, path),
+ Port = httpd_util:key1search(DirData, port),
+ Addr = httpd_util:key1search(DirData, bind_address),
+ {Addr, Port, Dir}.
+
+%% remove/1
+%%
+%% Closes dets tables used by this auth mod.
+%%
+remove(DirData) ->
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ dets:close(GDB),
+ dets:close(PWDB),
+ ok.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_mnesia.erl
new file mode 100644
index 0000000000..ec29022da0
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_mnesia.erl
@@ -0,0 +1,276 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_mnesia.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+-module(mod_auth_mnesia).
+-export([get_user/2,
+ list_group_members/2,
+ add_user/2,
+ add_group_member/3,
+ list_users/1,
+ delete_user/2,
+ list_groups/1,
+ delete_group_member/3,
+ delete_group/2]).
+
+-export([store_user/5, store_user/6,
+ store_group_member/5, store_group_member/6,
+ list_group_members/3, list_group_members/4,
+ list_groups/2, list_groups/3,
+ list_users/2, list_users/3,
+ remove_user/4, remove_user/5,
+ remove_group_member/5, remove_group_member/6,
+ remove_group/4, remove_group/5]).
+
+-export([store_directory_data/2]).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+
+
+store_directory_data(Directory, DirData) ->
+    %% We don't need to do anything here; we could of course check that the
+    %% appropriate mnesia tables have been created prior to starting the http server.
+ ok.
+
+
+%%
+%% API
+%%
+
+%% Compatibility API
+
+
+store_user(UserName, Password, Port, Dir, AccessPassword) ->
+ %% AccessPassword is ignored - was not used in previous version
+ DirData = [{path,Dir},{port,Port}],
+ UStruct = #httpd_user{username = UserName,
+ password = Password},
+ add_user(DirData, UStruct).
+
+store_user(UserName, Password, Addr, Port, Dir, AccessPassword) ->
+ %% AccessPassword is ignored - was not used in previous version
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ UStruct = #httpd_user{username = UserName,
+ password = Password},
+ add_user(DirData, UStruct).
+
+store_group_member(GroupName, UserName, Port, Dir, AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ add_group_member(DirData, GroupName, UserName).
+
+store_group_member(GroupName, UserName, Addr, Port, Dir, AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ add_group_member(DirData, GroupName, UserName).
+
+list_group_members(GroupName, Port, Dir) ->
+ DirData = [{path,Dir},{port,Port}],
+ list_group_members(DirData, GroupName).
+
+list_group_members(GroupName, Addr, Port, Dir) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ list_group_members(DirData, GroupName).
+
+list_groups(Port, Dir) ->
+ DirData = [{path,Dir},{port,Port}],
+ list_groups(DirData).
+
+list_groups(Addr, Port, Dir) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ list_groups(DirData).
+
+list_users(Port, Dir) ->
+ DirData = [{path,Dir},{port,Port}],
+ list_users(DirData).
+
+list_users(Addr, Port, Dir) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ list_users(DirData).
+
+remove_user(UserName, Port, Dir, _AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ delete_user(DirData, UserName).
+
+remove_user(UserName, Addr, Port, Dir, _AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ delete_user(DirData, UserName).
+
+remove_group_member(GroupName,UserName,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ delete_group_member(DirData, GroupName, UserName).
+
+remove_group_member(GroupName,UserName,Addr,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ delete_group_member(DirData, GroupName, UserName).
+
+remove_group(GroupName,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ delete_group(DirData, GroupName).
+
+remove_group(GroupName,Addr,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ delete_group(DirData, GroupName).
+
+%%
+%% Storage format of users in the mnesia table:
+%% httpd_user records
+%%
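+%% That is (editor's note), the record key is the composite username field;
+%% a hypothetical entry could look like
+%%
+%%   #httpd_user{username = {"joe", {127,0,0,1}, 8080, "/secret"},
+%%               password = "s3cr3t", user_data = []}
+%%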
+
+add_user(DirData, UStruct) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ UserName = UStruct#httpd_user.username,
+ Password = UStruct#httpd_user.password,
+ Data = UStruct#httpd_user.user_data,
+ User=#httpd_user{username={UserName,Addr,Port,Dir},
+ password=Password,
+ user_data=Data},
+ case mnesia:transaction(fun() -> mnesia:write(User) end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+get_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:read({httpd_user,
+ {UserName,Addr,Port,Dir}})
+ end) of
+ {aborted,Reason} ->
+ {error, Reason};
+ {'atomic',[]} ->
+ {error, no_such_user};
+ {'atomic', [Record]} when record(Record, httpd_user) ->
+ {ok, Record#httpd_user{username=UserName}};
+ Other ->
+ {error, no_such_user}
+ end.
+
+list_users(DirData) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:match_object({httpd_user,
+ {'_',Addr,Port,Dir},'_','_'})
+ end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ {'atomic',Users} ->
+ {ok,
+ lists:foldr(fun({httpd_user, {UserName, AnyAddr, AnyPort, AnyDir},
+ Password, Data}, Acc) ->
+ [UserName|Acc]
+ end,
+ [], Users)}
+ end.
+
+delete_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:delete({httpd_user,
+ {UserName,Addr,Port,Dir}})
+ end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+%%
+%% Storage of groups in the mnesia table:
+%% Multiple instances of {#httpd_group, User}
+%%
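+%% That is (editor's note), membership is stored as one record per member; a
+%% hypothetical group "admins" with two members corresponds to two records:
+%%
+%%   #httpd_group{name = {"admins", {127,0,0,1}, 8080, "/secret"}, userlist = "joe"}
+%%   #httpd_group{name = {"admins", {127,0,0,1}, 8080, "/secret"}, userlist = "anna"}
+%%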
+
+add_group_member(DirData, GroupName, User) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ Group=#httpd_group{name={GroupName, Addr, Port, Dir}, userlist=User},
+ case mnesia:transaction(fun() -> mnesia:write(Group) end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+list_group_members(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:read({httpd_group,
+ {GroupName,Addr,Port,Dir}})
+ end) of
+ {aborted, Reason} ->
+ {error,Reason};
+ {'atomic', Members} ->
+ {ok,[UserName || {httpd_group,{AnyGroupName,AnyAddr,AnyPort,AnyDir},UserName} <- Members,
+ AnyGroupName == GroupName, AnyAddr == Addr,
+ AnyPort == Port, AnyDir == Dir]}
+ end.
+
+list_groups(DirData) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:match_object({httpd_group,
+ {'_',Addr,Port,Dir},'_'})
+ end) of
+ {aborted, Reason} ->
+ {error, Reason};
+ {'atomic', Groups} ->
+ GroupNames=
+ [GroupName || {httpd_group,{GroupName,AnyAddr,AnyPort,AnyDir}, UserName} <- Groups,
+ AnyAddr == Addr, AnyPort == AnyPort, AnyDir == Dir],
+ {ok, httpd_util:uniq(lists:sort(GroupNames))}
+ end.
+
+delete_group_member(DirData, GroupName, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ Group = #httpd_group{name={GroupName, Addr, Port, Dir}, userlist=UserName},
+ case mnesia:transaction(fun() -> mnesia:delete_object(Group) end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+%% THIS IS WRONG (?) !
+%% Should first match out all httpd_group records for this group and then
+%% do mnesia:delete on those. Or ?
+
+delete_group(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:delete({httpd_group,
+ {GroupName,Addr,Port,Dir}})
+ end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+%% Utility functions.
+
+lookup_common(DirData) ->
+ Dir = httpd_util:key1search(DirData, path),
+ Port = httpd_util:key1search(DirData, port),
+ Addr = httpd_util:key1search(DirData, bind_address),
+ {Addr, Port, Dir}.
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_plain.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_plain.erl
new file mode 100644
index 0000000000..2f92dcb446
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_plain.erl
@@ -0,0 +1,344 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_plain.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_auth_plain).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+-define(VMODULE,"AUTH_PLAIN").
+-include("httpd_verbosity.hrl").
+
+
+%% Internal API
+-export([store_directory_data/2]).
+
+
+-export([get_user/2,
+ list_group_members/2,
+ add_user/2,
+ add_group_member/3,
+ list_users/1,
+ delete_user/2,
+ list_groups/1,
+ delete_group_member/3,
+ delete_group/2,
+ remove/1]).
+
+%%
+%% API
+%%
+
+%%
+%% Storage format of users in the ets table:
+%% {UserName, Password, UserData}
+%%
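+%% A hypothetical entry (editor's illustration):
+%%
+%%   {"joe", "s3cr3t", []}
+%%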
+
+add_user(DirData, #httpd_user{username = User} = UStruct) ->
+ ?vtrace("add_user -> entry with:"
+ "~n User: ~p",[User]),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ Record = {User,
+ UStruct#httpd_user.password,
+ UStruct#httpd_user.user_data},
+ case ets:lookup(PWDB, User) of
+ [{User, _SomePassword, _SomeData}] ->
+ {error, user_already_in_db};
+ _ ->
+ ets:insert(PWDB, Record),
+ true
+ end.
+
+get_user(DirData, User) ->
+ ?vtrace("get_user -> entry with:"
+ "~n User: ~p",[User]),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:lookup(PWDB, User) of
+ [{User, PassWd, Data}] ->
+ {ok, #httpd_user{username=User, password=PassWd, user_data=Data}};
+ _ ->
+ {error, no_such_user}
+ end.
+
+list_users(DirData) ->
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:match(PWDB, '$1') of
+ Records when list(Records) ->
+ {ok, lists:foldr(fun({User,PassWd,Data}, A) -> [User|A] end,
+ [], lists:flatten(Records))};
+ O ->
+ {ok, []}
+ end.
+
+delete_user(DirData, UserName) ->
+ ?vtrace("delete_user -> entry with:"
+ "~n UserName: ~p",[UserName]),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:lookup(PWDB, UserName) of
+ [{UserName, SomePassword, SomeData}] ->
+ ets:delete(PWDB, UserName),
+ case list_groups(DirData) of
+ {ok,Groups}->
+ lists:foreach(fun(Group) ->
+ delete_group_member(DirData, Group, UserName)
+ end,Groups),
+ true;
+ _->
+ true
+ end;
+ _ ->
+ {error, no_such_user}
+ end.
+
+%%
+%% Storage of groups in the ets table:
+%% {Group, UserList} where UserList is a list of strings.
+%%
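+%% A hypothetical entry (editor's illustration):
+%%
+%%   {"admins", ["joe", "anna"]}
+%%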
+
+add_group_member(DirData, Group, UserName) ->
+ ?DEBUG("add_group_members -> ~n"
+ " Group: ~p~n"
+ " UserName: ~p",[Group,UserName]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ case lists:member(UserName, Users) of
+ true ->
+ ?DEBUG("add_group_members -> already member in group",[]),
+ true;
+ false ->
+ ?DEBUG("add_group_members -> add",[]),
+ ets:insert(GDB, {Group, [UserName|Users]}),
+ true
+ end;
+ [] ->
+	    ?DEBUG("add_group_members -> create group",[]),
+ ets:insert(GDB, {Group, [UserName]}),
+ true;
+ Other ->
+ ?ERROR("add_group_members -> Other: ~p",[Other]),
+ {error, Other}
+ end.
+
+list_group_members(DirData, Group) ->
+ ?DEBUG("list_group_members -> Group: ~p",[Group]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ ?DEBUG("list_group_members -> Users: ~p",[Users]),
+ {ok, Users};
+ _ ->
+ {error, no_such_group}
+ end.
+
+list_groups(DirData) ->
+ ?DEBUG("list_groups -> entry",[]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:match(GDB, '$1') of
+ [] ->
+ ?DEBUG("list_groups -> []",[]),
+ {ok, []};
+ Groups0 when list(Groups0) ->
+ ?DEBUG("list_groups -> Groups0: ~p",[Groups0]),
+ {ok, httpd_util:uniq(lists:foldr(fun({G, U}, A) -> [G|A] end,
+ [], lists:flatten(Groups0)))};
+ _ ->
+ {ok, []}
+ end.
+
+delete_group_member(DirData, Group, User) ->
+ ?DEBUG("list_group_members -> ~n"
+ " Group: ~p~n"
+ " User: ~p",[Group,User]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ UDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] when list(Users) ->
+ case lists:member(User, Users) of
+ true ->
+ ?DEBUG("list_group_members -> deleted from group",[]),
+ ets:delete(GDB, Group),
+ ets:insert(GDB, {Group, lists:delete(User, Users)}),
+ true;
+ false ->
+ ?DEBUG("list_group_members -> not member",[]),
+ {error, no_such_group_member}
+ end;
+ _ ->
+ ?ERROR("list_group_members -> no such group",[]),
+ {error, no_such_group}
+ end.
+
+delete_group(DirData, Group) ->
+ ?DEBUG("list_group_members -> Group: ~p",[Group]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ ?DEBUG("list_group_members -> delete",[]),
+ ets:delete(GDB, Group),
+ true;
+ _ ->
+ ?ERROR("delete_group -> no such group",[]),
+ {error, no_such_group}
+ end.
+
+
+store_directory_data(Directory, DirData) ->
+ PWFile = httpd_util:key1search(DirData, auth_user_file),
+ GroupFile = httpd_util:key1search(DirData, auth_group_file),
+ case load_passwd(PWFile) of
+ {ok, PWDB} ->
+ case load_group(GroupFile) of
+ {ok, GRDB} ->
+		    %% Address and port are included in the file names...
+ Addr = httpd_util:key1search(DirData, bind_address),
+ Port = httpd_util:key1search(DirData, port),
+ {ok, PasswdDB} = store_passwd(Addr,Port,PWDB),
+ {ok, GroupDB} = store_group(Addr,Port,GRDB),
+ NDD1 = lists:keyreplace(auth_user_file, 1, DirData,
+ {auth_user_file, PasswdDB}),
+ NDD2 = lists:keyreplace(auth_group_file, 1, NDD1,
+ {auth_group_file, GroupDB}),
+ {ok, NDD2};
+ Err ->
+ ?ERROR("failed storing directory data: "
+ "load group error: ~p",[Err]),
+ {error, Err}
+ end;
+ Err2 ->
+ ?ERROR("failed storing directory data: "
+ "load passwd error: ~p",[Err2]),
+ {error, Err2}
+ end.
+
+
+
+%% load_passwd
+
+load_passwd(AuthUserFile) ->
+ case file:open(AuthUserFile, [read]) of
+ {ok,Stream} ->
+ parse_passwd(Stream, []);
+ {error, _} ->
+ {error, ?NICE("Can't open "++AuthUserFile)}
+ end.
+
+parse_passwd(Stream,PasswdList) ->
+ Line =
+ case io:get_line(Stream, '') of
+ eof ->
+ eof;
+ String ->
+ httpd_conf:clean(String)
+ end,
+ parse_passwd(Stream, PasswdList, Line).
+
+parse_passwd(Stream, PasswdList, eof) ->
+ file:close(Stream),
+ {ok, PasswdList};
+parse_passwd(Stream, PasswdList, "") ->
+ parse_passwd(Stream, PasswdList);
+parse_passwd(Stream, PasswdList, [$#|_]) ->
+ parse_passwd(Stream, PasswdList);
+parse_passwd(Stream, PasswdList, Line) ->
+ case regexp:split(Line,":") of
+ {ok, [User,Password]} ->
+ parse_passwd(Stream, [{User,Password, []}|PasswdList]);
+ {ok,_} ->
+ {error, ?NICE(Line)}
+ end.
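+
+%% For reference (editor's note): parse_passwd/3 above expects lines of the
+%% form "User:Password", e.g. a hypothetical auth user file containing
+%%
+%%   joe:s3cr3t
+%%   anna:letmein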
+
+%% load_group
+
+load_group(AuthGroupFile) ->
+ case file:open(AuthGroupFile, [read]) of
+ {ok, Stream} ->
+ parse_group(Stream,[]);
+ {error, _} ->
+ {error, ?NICE("Can't open "++AuthGroupFile)}
+ end.
+
+parse_group(Stream, GroupList) ->
+ Line=
+ case io:get_line(Stream,'') of
+ eof ->
+ eof;
+ String ->
+ httpd_conf:clean(String)
+ end,
+ parse_group(Stream, GroupList, Line).
+
+parse_group(Stream, GroupList, eof) ->
+ file:close(Stream),
+ {ok, GroupList};
+parse_group(Stream, GroupList, "") ->
+ parse_group(Stream, GroupList);
+parse_group(Stream, GroupList, [$#|_]) ->
+ parse_group(Stream, GroupList);
+parse_group(Stream, GroupList, Line) ->
+ case regexp:split(Line, ":") of
+ {ok, [Group,Users]} ->
+ {ok, UserList} = regexp:split(Users," "),
+ parse_group(Stream, [{Group,UserList}|GroupList]);
+ {ok, _} ->
+ {error, ?NICE(Line)}
+ end.
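+
+%% Likewise (editor's note), parse_group/3 expects lines of the form
+%% "Group:User1 User2 ...", e.g. a hypothetical group file line
+%%
+%%   admins:joe anna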
+
+
+%% store_passwd
+
+store_passwd(Addr,Port,PasswdList) ->
+ Name = httpd_util:make_name("httpd_passwd",Addr,Port),
+ PasswdDB = ets:new(Name, [set, public]),
+ store_passwd(PasswdDB, PasswdList).
+
+store_passwd(PasswdDB, []) ->
+ {ok, PasswdDB};
+store_passwd(PasswdDB, [User|Rest]) ->
+ ets:insert(PasswdDB, User),
+ store_passwd(PasswdDB, Rest).
+
+%% store_group
+
+store_group(Addr,Port,GroupList) ->
+ Name = httpd_util:make_name("httpd_group",Addr,Port),
+ GroupDB = ets:new(Name, [set, public]),
+ store_group(GroupDB, GroupList).
+
+
+store_group(GroupDB,[]) ->
+ {ok, GroupDB};
+store_group(GroupDB,[User|Rest]) ->
+ ets:insert(GroupDB, User),
+ store_group(GroupDB, Rest).
+
+
+%% remove/1
+%%
+%% Deletes ets tables used by this auth mod.
+%%
+remove(DirData) ->
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ ets:delete(PWDB),
+ ets:delete(GDB).
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_server.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_server.erl
new file mode 100644
index 0000000000..6694ed7eac
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_auth_server.erl
@@ -0,0 +1,424 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_server.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+
+-module(mod_auth_server).
+
+-include("httpd.hrl").
+%% -include("mod_auth.hrl").
+-include("httpd_verbosity.hrl").
+
+-behaviour(gen_server).
+
+
+%% mod_auth exports
+-export([start/2, stop/2,
+ add_password/4, update_password/5,
+ add_user/5, delete_user/5, get_user/5, list_users/4,
+ add_group_member/6, delete_group_member/6, list_group_members/5,
+ delete_group/5, list_groups/4]).
+
+%% Management exports
+-export([verbosity/3]).
+
+%% gen_server exports
+-export([start_link/3,
+ init/1,
+ handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+
+-record(state,{tab}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% External API %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% start_link/3
+%%
+%% NOTE: This is called by httpd_misc_sup when the process is started
+%%
+start_link(Addr, Port, Verbosity)->
+ ?vlog("start_link -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ gen_server:start_link({local, Name}, ?MODULE, [Verbosity],
+ [{timeout, infinity}]).
+
+
+%% start/2
+
+start(Addr, Port)->
+ ?vtrace("start -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ Verbosity = get(auth_verbosity),
+ case (catch httpd_misc_sup:start_auth_server(Addr, Port,
+ Verbosity)) of
+ {ok, Pid} ->
+ put(auth_server, Pid),
+ ok;
+ {error, Reason} ->
+ exit({failed_start_auth_server, Reason});
+ Error ->
+ exit({failed_start_auth_server, Error})
+ end;
+ _ -> %% Already started...
+ ok
+ end.
+
+
+%% stop/2
+
+stop(Addr, Port)->
+ ?vtrace("stop -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined -> %% Already stopped
+ ok;
+ _ ->
+ (catch httpd_misc_sup:stop_auth_server(Addr, Port))
+ end.
+
+
+%% verbosity/3
+
+verbosity(Addr, Port, Verbosity) ->
+ Name = make_name(Addr, Port),
+ Req = {verbosity, Verbosity},
+ call(Name, Req).
+
+
+%% add_password/4
+
+add_password(Addr, Port, Dir, Password)->
+ Name = make_name(Addr, Port),
+ Req = {add_password, Dir, Password},
+ call(Name, Req).
+
+
+%% update_password/5
+
+update_password(Addr, Port, Dir, Old, New) when list(New) ->
+ Name = make_name(Addr, Port),
+ Req = {update_password, Dir, Old, New},
+ call(Name, Req).
+
+
+%% add_user/5
+
+add_user(Addr, Port, Dir, User, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {add_user, Addr, Port, Dir, User, Password},
+ call(Name, Req).
+
+
+%% delete_user/5
+
+delete_user(Addr, Port, Dir, UserName, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {delete_user, Addr, Port, Dir, UserName, Password},
+ call(Name, Req).
+
+
+%% get_user/5
+
+get_user(Addr, Port, Dir, UserName, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {get_user, Addr, Port, Dir, UserName, Password},
+ call(Name, Req).
+
+
+%% list_users/4
+
+list_users(Addr, Port, Dir, Password) ->
+ Name = make_name(Addr,Port),
+ Req = {list_users, Addr, Port, Dir, Password},
+ call(Name, Req).
+
+
+%% add_group_member/6
+
+add_group_member(Addr, Port, Dir, GroupName, UserName, Password) ->
+ Name = make_name(Addr,Port),
+ Req = {add_group_member, Addr, Port, Dir, GroupName, UserName, Password},
+ call(Name, Req).
+
+
+%% delete_group_member/6
+
+delete_group_member(Addr, Port, Dir, GroupName, UserName, Password) ->
+ Name = make_name(Addr,Port),
+ Req = {delete_group_member, Addr, Port, Dir, GroupName, UserName, Password},
+ call(Name, Req).
+
+
+%% list_group_members/5
+
+list_group_members(Addr, Port, Dir, Group, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {list_group_members, Addr, Port, Dir, Group, Password},
+ call(Name, Req).
+
+
+%% delete_group/5
+
+delete_group(Addr, Port, Dir, GroupName, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {delete_group, Addr, Port, Dir, GroupName, Password},
+ call(Name, Req).
+
+
+%% list_groups/4
+
+list_groups(Addr, Port, Dir, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {list_groups, Addr, Port, Dir, Password},
+ call(Name, Req).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Server call-back functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% init
+
+init([undefined]) ->
+ init([?default_verbosity]);
+
+init([Verbosity]) ->
+ put(sname,auth),
+ put(verbosity,Verbosity),
+ ?vlog("starting",[]),
+ {ok,#state{tab = ets:new(auth_pwd,[set,protected])}}.
+
+
+%% handle_call
+
+%% Add a user
+handle_call({add_user, Addr, Port, Dir, User, AuthPwd}, _From, State) ->
+ Reply = api_call(Addr, Port, Dir, add_user, User, AuthPwd, State),
+ {reply, Reply, State};
+
+%% Get data about a user
+handle_call({get_user, Addr, Port, Dir, User, AuthPwd}, _From, State) ->
+ Reply = api_call(Addr, Port, Dir, get_user, [User], AuthPwd, State),
+ {reply, Reply, State};
+
+%% Add a group member
+handle_call({add_group_member, Addr, Port, Dir, Group, User, AuthPwd},
+ _From, State) ->
+ Reply = api_call(Addr, Port, Dir, add_group_member, [Group, User],
+ AuthPwd, State),
+ {reply, Reply, State};
+
+%% Delete a group member
+handle_call({delete_group_member, Addr, Port, Dir, Group, User, AuthPwd},
+ _From, State)->
+ Reply = api_call(Addr, Port, Dir, delete_group_member, [Group, User],
+ AuthPwd, State),
+ {reply, Reply, State};
+
+%% List all users that are standalone users
+handle_call({list_users, Addr, Port, Dir, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, list_users, [], AuthPwd, State),
+ {reply, Reply, State};
+
+%% Delete a user
+handle_call({delete_user, Addr, Port, Dir, User, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, delete_user, [User], AuthPwd, State),
+ {reply, Reply, State};
+
+%% Delete a group
+handle_call({delete_group, Addr, Port, Dir, Group, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, delete_group, [Group], AuthPwd, State),
+ {reply, Reply, State};
+
+%% List the current groups
+handle_call({list_groups, Addr, Port, Dir, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, list_groups, [], AuthPwd, State),
+ {reply, Reply, State};
+
+%% List the members of the given group
+handle_call({list_group_members, Addr, Port, Dir, Group, AuthPwd},
+ _From, State)->
+ Reply = api_call(Addr, Port, Dir, list_group_members, [Group],
+ AuthPwd, State),
+ {reply, Reply, State};
+
+
+%% Add password for a directory
+handle_call({add_password, Dir, Password}, _From, State)->
+ Reply = do_add_password(Dir, Password, State),
+ {reply, Reply, State};
+
+
+%% Update the password for a directory
+
+handle_call({update_password, Dir, Old, New},_From,State)->
+ Reply =
+ case getPassword(State, Dir) of
+ OldPwd when binary(OldPwd)->
+ case erlang:md5(Old) of
+ OldPwd ->
+ %% The old password is right =>
+ %% update the password to the new
+ do_update_password(Dir,New,State),
+ ok;
+ _->
+ {error, error_new}
+ end;
+ _->
+ {error, error_old}
+ end,
+ {reply, Reply, State};
+
+handle_call(stop, _From, State)->
+ {stop, normal, State};
+
+handle_call({verbosity,Verbosity},_From,State)->
+ OldVerbosity = put(verbosity,Verbosity),
+ ?vlog("set verbosity: ~p -> ~p",[Verbosity,OldVerbosity]),
+ {reply,OldVerbosity,State}.
+
+handle_info(Info,State)->
+ {noreply,State}.
+
+handle_cast(Request,State)->
+ {noreply,State}.
+
+
+terminate(Reason,State) ->
+ ets:delete(State#state.tab),
+ ok.
+
+
+%% code_change({down, ToVsn}, State, Extra)
+%%
+code_change({down, _}, #state{tab = Tab}, downgrade_to_2_6_0) ->
+ ?vlog("downgrade to 2.6.0", []),
+ {ok, {state, Tab, undefined}};
+
+
+%% code_change(FromVsn, State, Extra)
+%%
+code_change(_, {state, Tab, _}, upgrade_from_2_6_0) ->
+ ?vlog("upgrade from 2.6.0", []),
+ {ok, #state{tab = Tab}}.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions that really changes the data in the database %%
+%% of users to different directories %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% API gateway
+
+api_call(Addr, Port, Dir, Func, Args,Password,State) ->
+ case controlPassword(Password,State,Dir) of
+ ok->
+ ConfigName = httpd_util:make_name("httpd_conf",Addr,Port),
+ case ets:match_object(ConfigName, {directory, Dir, '$1'}) of
+ [{directory, Dir, DirData}] ->
+ AuthMod = auth_mod_name(DirData),
+ ?DEBUG("api_call -> call ~p:~p",[AuthMod,Func]),
+ Ret = (catch apply(AuthMod, Func, [DirData|Args])),
+		    ?DEBUG("api_call -> Ret: ~p",[Ret]),
+ Ret;
+ O ->
+ ?DEBUG("api_call -> O: ~p",[O]),
+ {error, no_such_directory}
+ end;
+ bad_password ->
+ {error,bad_password}
+ end.
+
+controlPassword(Password,State,Dir)when Password=:="DummyPassword"->
+ bad_password;
+
+controlPassword(Password,State,Dir)->
+ case getPassword(State,Dir) of
+ Pwd when binary(Pwd)->
+ case erlang:md5(Password) of
+ Pwd ->
+ ok;
+ _->
+ bad_password
+ end;
+ _ ->
+ bad_password
+ end.
+
+
+getPassword(State,Dir)->
+ case lookup(State#state.tab, Dir) of
+ [{_,Pwd}]->
+ Pwd;
+ _ ->
+ {error,bad_password}
+ end.
+
+do_update_password(Dir, New, State) ->
+ ets:insert(State#state.tab, {Dir, erlang:md5(New)}).
+
+do_add_password(Dir, Password, State) ->
+ case getPassword(State,Dir) of
+ PwdExists when binary(PwdExists) ->
+ {error, dir_protected};
+ {error, _} ->
+ do_update_password(Dir, Password, State)
+ end.
+
+
+auth_mod_name(DirData) ->
+ case httpd_util:key1search(DirData, auth_type, plain) of
+ plain -> mod_auth_plain;
+ mnesia -> mod_auth_mnesia;
+ dets -> mod_auth_dets
+ end.
+
+
+lookup(Db, Key) ->
+ ets:lookup(Db, Key).
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_auth",Addr,Port).
+
+
+call(Name, Req) ->
+ case (catch gen_server:call(Name, Req)) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Reply ->
+ Reply
+ end.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_browser.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_browser.erl
new file mode 100644
index 0000000000..62ffba0e5b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_browser.erl
@@ -0,0 +1,214 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_browser.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+%% ----------------------------------------------------------------------
+%%
+%% Browsers send a string to the web server
+%% to identify themselves. They are a bit nasty,
+%% since the only thing that the specification really
+%% is strict about is that the string shall be short.
+%% Three examples:
+%%
+%% Netscape Mozilla/4.75 [en] (X11; U; SunOS 5.8 sun4u)
+%% IE5 Mozilla/4.0 (compatible; MSIE 5.0; SP1B; SunOS 5.8 sun4u; X11)
+%% Lynx Lynx/2.8.3rel.1 libwww-FM/2.142
+%%
+%% ----------------------------------------------------------------------
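+%%
+%% Editor's note: for such strings getBrowser/1 below returns a tuple of the
+%% form {{Browser, Version}, OperatingSystem}; for the first example above
+%% this would be roughly {{netscape,4.75},sunos5}.
+%%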
+
+-module(mod_browser).
+
+%% Remember that the order of the mozilla browsers is
+%% important, since some browsers include the names of others
+%% to behave as if they were something else.
+-define(MOZILLA_BROWSERS,[{opera,"opera"},{msie,"msie"}]).
+
+
+%% If your operating system is not recognized, add it to this list.
+-define(OPERATIVE_SYSTEMS,[{win3x,["win16","windows 3","windows 16-bit"]},
+ {win95,["win95","windows 95"]},
+ {win98,["win98", "windows 98"]},
+ {winnt,["winnt", "windows nt"]},
+ {win2k,["nt 5"]},
+ {sunos4,["sunos 4"]},
+ {sunos5,["sunos 5"]},
+ {sun,["sunos"]},
+ {aix,["aix"]},
+ {linux,["linux"]},
+ {sco,["sco","unix_sv"]},
+ {freebsd,["freebsd"]},
+ {bsd,["bsd"]}]).
+
+-define(LYNX,lynx).
+-define(MOZILLA,mozilla).
+-define(EMACS,emacs).
+-define(STAROFFICE,soffice).
+-define(MOSAIC,mosaic).
+-define(NETSCAPE,netscape).
+-define(UNKOWN,unknown).
+
+-include("httpd.hrl").
+
+-export([do/1, test/0, getBrowser/1]).
+
+
+do(Info) ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ {Status_code,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ undefined ->
+ {proceed,[{'user-agent',getBrowser1(Info)}|Info#mod.data]}
+ end.
+
+getBrowser1(Info) ->
+ PHead=Info#mod.parsed_header,
+ case httpd_util:key1search(PHead,"User-Agent") of
+ undefined->
+ undefined;
+ AgentString ->
+ getBrowser(AgentString)
+ end.
+
+getBrowser(AgentString) ->
+ LAgentString = httpd_util:to_lower(AgentString),
+ case regexp:first_match(LAgentString,"^[^ ]*") of
+ {match,Start,Length} ->
+ Browser=lists:sublist(LAgentString,Start,Length),
+ case browserType(Browser) of
+ {mozilla,Vsn} ->
+ {getMozilla(LAgentString,
+ ?MOZILLA_BROWSERS,{?NETSCAPE,Vsn}),
+ operativeSystem(LAgentString)};
+ AnyBrowser ->
+ {AnyBrowser,operativeSystem(LAgentString)}
+ end;
+ nomatch ->
+ browserType(LAgentString)
+ end.
+
+browserType([$l,$y,$n,$x|Version]) ->
+ {?LYNX,browserVersion(Version)};
+browserType([$m,$o,$z,$i,$l,$l,$a|Version]) ->
+ {?MOZILLA,browserVersion(Version)};
+browserType([$e,$m,$a,$c,$s|Version]) ->
+ {?EMACS,browserVersion(Version)};
+browserType([$e,$t,$a,$r,$o,$f,$f,$i,$c,$e|Version]) ->
+ {?STAROFFICE,browserVersion(Version)};
+browserType([$m,$o,$s,$a,$i,$c|Version]) ->
+ {?MOSAIC,browserVersion(Version)};
+browserType(Unknown)->
+ unknown.
+
+
+browserVersion([$/|VsnString]) ->
+ case catch list_to_float(VsnString) of
+ Number when float(Number) ->
+ Number;
+ Whatever ->
+ case string:span(VsnString,"1234567890.") of
+ 0 ->
+ unknown;
+ VLength ->
+ Vsn = string:substr(VsnString,1,VLength),
+ case string:tokens(Vsn,".") of
+ [Number] ->
+ list_to_float(Number++".0");
+ [Major,Minor|_MinorMinor] ->
+ list_to_float(Major++"."++Minor)
+ end
+ end
+ end;
+browserVersion(VsnString) ->
+ browserVersion([$/|VsnString]).
+
+operativeSystem(OpString) ->
+ operativeSystem(OpString, ?OPERATIVE_SYSTEMS).
+
+operativeSystem(OpString,[]) ->
+ unknown;
+operativeSystem(OpString,[{RetVal,RegExps}|Rest]) ->
+ case controlOperativeSystem(OpString,RegExps) of
+ true->
+ RetVal;
+ _ ->
+ operativeSystem(OpString,Rest)
+ end.
+
+controlOperativeSystem(OpString,[]) ->
+ false;
+controlOperativeSystem(OpString,[Regexp|Regexps]) ->
+ case regexp:match(OpString,Regexp) of
+ {match,_,_}->
+ true;
+ nomatch->
+ controlOperativeSystem(OpString,Regexps)
+ end.
+
+
+%% OK, this is ugly, but that's the only way, since not all browsers
+%% conform to the name/vsn standard. First we match the agent string
+%% against the regexps for the browsers that are not the default
+%% mozilla browser (Opera, MSIE); if none of them matches, it is the
+%% default mozilla browser, i.e. Netscape.
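+%%
+%% E.g. (editor's sketch): for the lowercased agent string of the MSIE example
+%% in the module header comment, getMozilla/3 is expected to return {msie,5.0}.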
+
+getMozilla(AgentString,[],Default) ->
+ Default;
+getMozilla(AgentString,[{Agent,AgentRegExp}|Rest],Default) ->
+ case regexp:match(AgentString,AgentRegExp) of
+ {match,_,_} ->
+ {Agent,getVersion(AgentString,AgentRegExp)};
+ nomatch ->
+ getMozilla(AgentString,Rest,Default)
+ end.
+
+getVersion(AgentString,AgentRegExp) ->
+ case regexp:match(AgentString,AgentRegExp++"[0-9\.\ ]*") of
+ {match,Start,Length} when length(AgentRegExp) < Length ->
+ %% Ok we got the number split it out
+ RealStart=Start+length(AgentRegExp),
+ RealLength=Length-length(AgentRegExp),
+ VsnString=string:substr(AgentString,RealStart,RealLength),
+ case string:strip(VsnString,both,$\ ) of
+ [] ->
+ unknown;
+ Vsn ->
+ case string:tokens(Vsn,".") of
+ [Number]->
+ list_to_float(Number++".0");
+ [Major,Minor|_MinorMinor]->
+ list_to_float(Major++"."++Minor)
+ end
+ end;
+ nomatch ->
+ unknown
+ end.
+
+
+test()->
+ io:format("~n--------------------------------------------------------~n"),
+ Res1=getBrowser("Mozilla/4.75 [en] (X11; U; SunOS 5.8 sun4u)"),
+ io:format("~p",[Res1]),
+ io:format("~n--------------------------------------------------------~n"),
+ io:format("~n--------------------------------------------------------~n"),
+ Res2=getBrowser("Mozilla/4.0 (compatible; MSIE 5.0; SP1B; SunOS 5.8 sun4u; X11)"),
+ io:format("~p",[Res2]),
+ io:format("~n--------------------------------------------------------~n"),
+ io:format("~n--------------------------------------------------------~n"),
+ Res3=getBrowser("Lynx/2.8.3rel.1 libwww-FM/2.142"),
+ io:format("~p",[Res3]),
+ io:format("~n--------------------------------------------------------~n").
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_cgi.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_cgi.erl
new file mode 100644
index 0000000000..d9070b8860
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_cgi.erl
@@ -0,0 +1,694 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_cgi.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_cgi).
+-export([do/1,env/3,status_code/1,load/2]).
+
+%%Exports to the interface for sending chunked data
+%% to http/1.1 users and full responses to http/1.0
+-export([send/5,final_send/4, update_status_code/2,get_new_size/2]).
+-include("httpd.hrl").
+
+-define(VMODULE,"CGI").
+-include("httpd_verbosity.hrl").
+
+-define(GATEWAY_INTERFACE,"CGI/1.1").
+-define(DEFAULT_CGI_TIMEOUT,15000).
+
+%% do
+
+do(Info) ->
+ ?vtrace("do",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode, PhraseArgs, Reason} ->
+ {proceed, Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ ?vtrace("do -> no status code has been generated", []),
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ ?vtrace("do -> no response has been generated", []),
+ RequestURI =
+ case httpd_util:key1search(Info#mod.data,
+ new_request_uri) of
+ undefined ->
+ Info#mod.request_uri;
+ Value ->
+ Value
+ end,
+ ?vtrace("do -> RequestURI: ~p", [RequestURI]),
+ ScriptAliases =
+ httpd_util:multi_lookup(Info#mod.config_db,
+ script_alias),
+ ?vtrace("do -> ScriptAliases: ~p", [ScriptAliases]),
+ case mod_alias:real_script_name(Info#mod.config_db,
+ RequestURI,
+ ScriptAliases) of
+ {Script, AfterScript} ->
+ exec_script(Info, Script, AfterScript, RequestURI);
+ not_a_script ->
+ {proceed,Info#mod.data}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+
+%% is_executable(File) ->
+%% ?DEBUG("is_executable -> entry with~n"
+%% " File: ~s",[File]),
+%% Dir = filename:dirname(File),
+%% FileName = filename:basename(File),
+%% is_executable(FileName,Dir).
+%%
+%% is_executable(FileName,Dir) ->
+%% ?DEBUG("is_executable -> entry with~n"
+%% " Dir: ~s~n"
+%% " FileName: ~s",[Dir,FileName]),
+%% case os:find_executable(FileName, Dir) of
+%% false ->
+%% false;
+%% _ ->
+%% true
+%% end.
+
+
+%% -------------------------
+%% Start temporary (hopefully) fix for win32
+%% OTP-3627
+%%
+
+is_executable(File) ->
+ Dir = filename:dirname(File),
+ FileName = filename:basename(File),
+ case os:type() of
+ {win32,_} ->
+ is_win32_executable(Dir,FileName);
+ _ ->
+ is_other_executable(Dir,FileName)
+ end.
+
+
+is_win32_executable(D,F) ->
+ case ends_with(F,[".bat",".exe",".com"]) of
+ false ->
+	    %% This is why we can't use 'os:find_executable' directly.
+	    %% It assumes that executable files are given without extension.
+ case os:find_executable(F,D) of
+ false ->
+ false;
+ _ ->
+ true
+ end;
+ true ->
+ case file:read_file_info(D ++ "/" ++ F) of
+ {ok,_} ->
+ true;
+ _ ->
+ false
+ end
+ end.
+
+
+is_other_executable(D,F) ->
+ case os:find_executable(F,D) of
+ false ->
+ false;
+ _ ->
+ true
+ end.
+
+
+ends_with(File,[]) ->
+ false;
+ends_with(File,[Ext|Rest]) ->
+ case ends_with1(File,Ext) of
+ true ->
+ true;
+ false ->
+ ends_with(File,Rest)
+ end.
+
+ends_with1(S,E) when length(S) >= length(E) ->
+ case to_lower(string:right(S,length(E))) of
+ E ->
+ true;
+ _ ->
+ false
+ end;
+ends_with1(_S,_E) ->
+ false.
+
+
+to_lower(S) -> to_lower(S,[]).
+
+to_lower([],L) -> lists:reverse(L);
+to_lower([H|T],L) -> to_lower(T,[to_lower1(H)|L]).
+
+to_lower1(C) when C >= $A, C =< $Z ->
+ C + ($a - $A);
+to_lower1(C) ->
+ C.
+
+%%
+%% End fix
+%% ---------------------------------
+
+
+env(VarName, Value) ->
+ {VarName, Value}.
+
+env(Info, Script, AfterScript) ->
+ ?vtrace("env -> entry with"
+ "~n Script: ~p"
+ "~n AfterScript: ~p",
+ [Script, AfterScript]),
+ {_, RemoteAddr} = (Info#mod.init_data)#init_data.peername,
+ ServerName = (Info#mod.init_data)#init_data.resolve,
+ PH = parsed_header(Info#mod.parsed_header),
+ Env =
+ [env("SERVER_SOFTWARE",?SERVER_SOFTWARE),
+ env("SERVER_NAME",ServerName),
+ env("GATEWAY_INTERFACE",?GATEWAY_INTERFACE),
+ env("SERVER_PROTOCOL",?SERVER_PROTOCOL),
+ env("SERVER_PORT",
+ integer_to_list(httpd_util:lookup(Info#mod.config_db,port,80))),
+ env("REQUEST_METHOD",Info#mod.method),
+ env("REMOTE_ADDR",RemoteAddr),
+ env("SCRIPT_NAME",Script)],
+ Env1 =
+ case Info#mod.method of
+ "GET" ->
+ case AfterScript of
+ {[], QueryString} ->
+ [env("QUERY_STRING", QueryString)|Env];
+ {PathInfo, []} ->
+ Aliases = httpd_util:multi_lookup(
+ Info#mod.config_db,alias),
+ {_, PathTranslated, _} =
+ mod_alias:real_name(
+ Info#mod.config_db, PathInfo, Aliases),
+ [Env|
+ [env("PATH_INFO","/"++httpd_util:decode_hex(PathInfo)),
+ env("PATH_TRANSLATED",PathTranslated)]];
+ {PathInfo, QueryString} ->
+ Aliases = httpd_util:multi_lookup(
+ Info#mod.config_db,alias),
+ {_, PathTranslated, _} =
+ mod_alias:real_name(
+ Info#mod.config_db, PathInfo, Aliases),
+ [Env|
+ [env("PATH_INFO",
+ httpd_util:decode_hex(PathInfo)),
+ env("PATH_TRANSLATED",PathTranslated),
+ env("QUERY_STRING", QueryString)]];
+ [] ->
+ Env
+ end;
+ "POST" ->
+ [env("CONTENT_LENGTH",
+ integer_to_list(httpd_util:flatlength(
+ Info#mod.entity_body)))|Env];
+ _ ->
+ Env
+ end,
+ Env2 =
+ case httpd_util:key1search(Info#mod.data,remote_user) of
+ undefined ->
+ Env1;
+ RemoteUser ->
+ [env("REMOTE_USER",RemoteUser)|Env1] %% OTP-4416
+ end,
+ lists:flatten([Env2|PH]).
+
+
+parsed_header(List) ->
+ parsed_header(List, []).
+
+parsed_header([], SoFar) ->
+ SoFar;
+parsed_header([{Name,[Value|R1]}|R2], SoFar) when list(Value)->
+ NewName=lists:map(fun(X) -> if X == $- -> $_; true -> X end end,Name),
+ Env = env("HTTP_"++httpd_util:to_upper(NewName),
+ multi_value([Value|R1])),
+ parsed_header(R2, [Env|SoFar]);
+
+parsed_header([{Name,Value}|Rest], SoFar) ->
+ {ok,NewName,_} = regexp:gsub(Name, "-", "_"),
+ Env=env("HTTP_"++httpd_util:to_upper(NewName),Value),
+ parsed_header(Rest, [Env|SoFar]).
+
+
+multi_value([]) ->
+ [];
+multi_value([Value]) ->
+ Value;
+multi_value([Value|Rest]) ->
+ Value++", "++multi_value(Rest).
+
+
+exec_script(Info, Script, AfterScript, RequestURI) ->
+ ?vdebug("exec_script -> entry with"
+ "~n Script: ~p"
+ "~n AfterScript: ~p",
+ [Script,AfterScript]),
+ exec_script(is_executable(Script),Info,Script,AfterScript,RequestURI).
+
+exec_script(true, Info, Script, AfterScript, RequestURI) ->
+ ?vtrace("exec_script -> entry when script is executable",[]),
+ process_flag(trap_exit,true),
+ Dir = filename:dirname(Script),
+ [Script_Name|_] = string:tokens(RequestURI, "?"),
+ Env = env(Info, Script_Name, AfterScript),
+ Port = (catch open_port({spawn,Script},[stream,{cd, Dir},{env, Env}])),
+ ?vtrace("exec_script -> Port: ~w",[Port]),
+ case Port of
+ P when port(P) ->
+ %% Send entity_body to port.
+ Res = case Info#mod.entity_body of
+ [] ->
+ true;
+ EntityBody ->
+ (catch port_command(Port, EntityBody))
+ end,
+ case Res of
+ {'EXIT',Reason} ->
+ ?vlog("port send failed:"
+ "~n Port: ~p"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Port,Info#mod.request_uri,Reason]),
+ exit({open_cmd_failed,Reason,
+ [{mod,?MODULE},{port,Port},
+ {uri,Info#mod.request_uri},
+ {script,Script},{env,Env},{dir,Dir},
+ {ebody_size,sz(Info#mod.entity_body)}]});
+ true ->
+ proxy(Info, Port)
+ end;
+ {'EXIT',Reason} ->
+ ?vlog("open port failed: exit"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Info#mod.request_uri,Reason]),
+ exit({open_port_failed,Reason,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]});
+ O ->
+ ?vlog("open port failed: unknown result"
+ "~n URI: ~p"
+ "~n O: ~p",
+ [Info#mod.request_uri,O]),
+ exit({open_port_failed,O,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]})
+ end;
+
+exec_script(false,Info,Script,_AfterScript,_RequestURI) ->
+ ?vlog("script ~s not executable",[Script]),
+ {proceed,
+ [{status,
+ {404,Info#mod.request_uri,
+ ?NICE("You don't have permission to execute " ++
+ Info#mod.request_uri ++ " on this server")}}|
+ Info#mod.data]}.
+
+
+
+%%
+%% Socket <-> Port communication
+%%
+
+proxy(#mod{config_db = ConfigDb} = Info, Port) ->
+ Timeout = httpd_util:lookup(ConfigDb, cgi_timeout, ?DEFAULT_CGI_TIMEOUT),
+ proxy(Info, Port, 0, undefined,[], Timeout).
+
+proxy(Info, Port, Size, StatusCode, AccResponse, Timeout) ->
+ ?vdebug("proxy -> entry with"
+ "~n Size: ~p"
+ "~n StatusCode ~p"
+ "~n Timeout: ~p",
+ [Size, StatusCode, Timeout]),
+ receive
+ {Port, {data, Response}} when port(Port) ->
+ ?vtrace("proxy -> got some data from the port",[]),
+
+ NewStatusCode = update_status_code(StatusCode, Response),
+
+ ?vtrace("proxy -> NewStatusCode: ~p",[NewStatusCode]),
+ case send(Info, NewStatusCode, Response, Size, AccResponse) of
+ socket_closed ->
+ ?vtrace("proxy -> socket closed: kill port",[]),
+ (catch port_close(Port)), % KILL the port !!!!
+ process_flag(trap_exit,false),
+ {proceed,
+ [{response,{already_sent,200,Size}}|Info#mod.data]};
+
+ head_sent ->
+ ?vtrace("proxy -> head sent: kill port",[]),
+ (catch port_close(Port)), % KILL the port !!!!
+ process_flag(trap_exit,false),
+ {proceed,
+ [{response,{already_sent,200,Size}}|Info#mod.data]};
+
+ {http_response, NewAccResponse} ->
+ ?vtrace("proxy -> head response: continue",[]),
+ NewSize = get_new_size(Size, Response),
+ proxy(Info, Port, NewSize, NewStatusCode,
+ NewAccResponse, Timeout);
+
+ _ ->
+ ?vtrace("proxy -> continue",[]),
+ %% The data is sent and the socket is not closed, continue
+ NewSize = get_new_size(Size, Response),
+ proxy(Info, Port, NewSize, NewStatusCode,
+ "nonempty", Timeout)
+ end;
+
+ {'EXIT', Port, normal} when port(Port) ->
+ ?vtrace("proxy -> exit signal from port: normal",[]),
+ NewStatusCode = update_status_code(StatusCode,AccResponse),
+ final_send(Info,NewStatusCode,Size,AccResponse),
+ process_flag(trap_exit,false),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]};
+
+ {'EXIT', Port, Reason} when port(Port) ->
+ ?vtrace("proxy -> exit signal from port: ~p",[Reason]),
+ process_flag(trap_exit, false),
+ {proceed, [{status,{400,none,reason(Reason)}}|Info#mod.data]};
+
+ {'EXIT', Pid, Reason} when pid(Pid) ->
+	    %% This is the case where a linked process has died.
+	    %% It would be nice to respond with a server error,
+	    %% but the header has already been sent.
+ ?vtrace("proxy -> exit signal from ~p: ~p",[Pid, Reason]),
+ proxy(Info, Port, Size, StatusCode, AccResponse, Timeout);
+
+ %% This should not happen
+ WhatEver ->
+ ?vinfo("proxy -> received garbage: ~n~p", [WhatEver]),
+ NewStatusCode = update_status_code(StatusCode, AccResponse),
+ final_send(Info, StatusCode, Size, AccResponse),
+ process_flag(trap_exit, false),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]}
+
+ after Timeout ->
+ ?vlog("proxy -> timeout",[]),
+ (catch port_close(Port)), % KILL the port !!!!
+ httpd_socket:close(Info#mod.socket_type, Info#mod.socket),
+ process_flag(trap_exit,false),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]}
+ end.
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions that handles the sending of the data to the client %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%----------------------------------------------------------------------
+%% Send the header the first time the size of the body is Zero
+%%----------------------------------------------------------------------
+
+send(#mod{method = "HEAD"} = Info, StatusCode, Response, 0, []) ->
+ first_handle_head_request(Info, StatusCode, Response);
+send(Info, StatusCode, Response, 0, []) ->
+ first_handle_other_request(Info, StatusCode, Response);
+
+%%----------------------------------------------------------------------
+%% The size of the body is bigger than zero =>
+%% we have a part of the body to send
+%%----------------------------------------------------------------------
+send(Info, StatusCode, Response, Size, AccResponse) ->
+ handle_other_request(Info, StatusCode, Response).
+
+
+%%----------------------------------------------------------------------
+%% The function is called the last time when the port has closed
+%%----------------------------------------------------------------------
+
+final_send(Info, StatusCode, Size, AccResponse)->
+ final_handle_other_request(Info, StatusCode).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The code that handles the head requests %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%----------------------------------------------------------------------
+%% The request is a HEAD request; if it is an HTTP/1.1 request we answer it
+%% directly, otherwise we must collect the size of the body before we can answer.
+%% Return Values:
+%% head_sent
+%%----------------------------------------------------------------------
+first_handle_head_request(Info, StatusCode, Response)->
+ case Info#mod.http_version of
+ "HTTP/1.1" ->
+	    %% Since we have all we need to create the header, create it,
+	    %% send it and return head_sent.
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok, [HeadEnd, Rest]} ->
+ HeadEnd1 = removeStatus(HeadEnd),
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [create_header(Info,StatusCode),
+ HeadEnd1,"\r\n\r\n"]);
+ _ ->
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [create_header(Info, StatusCode),
+ "Content-Type:text/html\r\n\r\n"])
+ end;
+ _ ->
+ Response1= case regexp:split(Response,"\r\n\r\n|\n\n") of
+ {ok,[HeadEnd|Rest]} ->
+ removeStatus(HeadEnd);
+ _ ->
+ ["Content-Type:text/html"]
+ end,
+ H1 = httpd_util:header(StatusCode,Info#mod.connection),
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [H1,Response1,"\r\n\r\n"])
+ end,
+ head_sent.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Handle the requests that is to the other methods %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%----------------------------------------------------------------------
+%% Create the http-response header and send it to the user if it is
+%% an http/1.1 request; otherwise we must accumulate it.
+%%----------------------------------------------------------------------
+first_handle_other_request(Info,StatusCode,Response)->
+ Header = create_header(Info,StatusCode),
+ Response1 =
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok,[HeadPart,[]]} ->
+ [Header, removeStatus(HeadPart),"\r\n\r\n"];
+
+ {ok,[HeadPart,BodyPart]} ->
+ [Header, removeStatus(HeadPart), "\r\n\r\n",
+ httpd_util:integer_to_hexlist(length(BodyPart)),
+ "\r\n", BodyPart];
+ _WhatEver ->
+ %% No response header field from the cgi-script,
+ %% Just a body
+ [Header, "Content-Type:text/html","\r\n\r\n",
+ httpd_util:integer_to_hexlist(length(Response)),
+ "\r\n", Response]
+ end,
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket, Response1).
+
+
+handle_other_request(#mod{http_version = "HTTP/1.1",
+ socket_type = Type, socket = Sock} = Info,
+ StatusCode, Response0) ->
+ Response = create_chunk(Info, Response0),
+ httpd_socket:deliver(Type, Sock, Response);
+handle_other_request(#mod{socket_type = Type, socket = Sock} = Info,
+ StatusCode, Response) ->
+ httpd_socket:deliver(Type, Sock, Response).
+
+
+final_handle_other_request(#mod{http_version = "HTTP/1.1",
+ socket_type = Type, socket = Sock},
+ StatusCode) ->
+ httpd_socket:deliver(Type, Sock, "0\r\n");
+final_handle_other_request(#mod{socket_type = Type, socket = Sock},
+ StatusCode) ->
+ httpd_socket:close(Type, Sock),
+ socket_closed.
+
+
+create_chunk(_Info, Response) ->
+ HEXSize = httpd_util:integer_to_hexlist(length(lists:flatten(Response))),
+ HEXSize++"\r\n"++Response++"\r\n".
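+
+%% E.g. (editor's note): create_chunk(Info, "hello") yields "5\r\nhello\r\n",
+%% i.e. the HTTP/1.1 chunked transfer encoding of the data.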
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The various helper functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+update_status_code(undefined, Response) ->
+ case status_code(Response) of
+ {ok, StatusCode1} ->
+ StatusCode1;
+ _ ->
+ ?vlog("invalid response from script:~n~p", [Response]),
+ 500
+ end;
+update_status_code(StatusCode,_Response)->
+ StatusCode.
+
+
+get_new_size(0,Response)->
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok,[Head,Body]}->
+ length(lists:flatten(Body));
+ _ ->
+	    %% No header in the response
+ length(lists:flatten(Response))
+ end;
+
+get_new_size(Size,Response)->
+ Size+length(lists:flatten(Response)).
+
+%%----------------------------------------------------------------------
+%% Creates the http-header for a response
+%%----------------------------------------------------------------------
+create_header(Info,StatusCode)->
+ Cache=case httpd_util:lookup(Info#mod.config_db,script_nocache,false) of
+ true->
+ Date=httpd_util:rfc1123_date(),
+ "Cache-Control:no-cache\r\nPragma:no-cache\r\nExpires:"++ Date ++ "\r\n";
+ false ->
+ []
+ end,
+ case Info#mod.http_version of
+ "HTTP/1.1" ->
+ Header=httpd_util:header(StatusCode, Info#mod.connection),
+ Header++"Transfer-encoding:chunked\r\n"++Cache;
+ _ ->
+ httpd_util:header(StatusCode,Info#mod.connection)++Cache
+ end.
+
+
+
+%% status_code
+
+status_code(Response) ->
+ case httpd_util:split(Response,"\n\n|\r\n\r\n",2) of
+ {ok,[Header,Body]} ->
+ case regexp:split(Header,"\n|\r\n") of
+ {ok,HeaderFields} ->
+ {ok,extract_status_code(HeaderFields)};
+ {error,_} ->
+ {error, bad_script_output(Response)}
+ end;
+ _ ->
+	    %% No header field in the returned data; return 200, the default code
+ {ok, 200}
+ end.
+
+bad_script_output(Bad) ->
+ lists:flatten(io_lib:format("Bad script output ~s",[Bad])).
+
+
+extract_status_code([]) ->
+ 200;
+extract_status_code([[$L,$o,$c,$a,$t,$i,$o,$n,$:,$ |_]|_]) ->
+ 302;
+extract_status_code([[$S,$t,$a,$t,$u,$s,$:,$ |CodeAndReason]|_]) ->
+ case httpd_util:split(CodeAndReason," ",2) of
+ {ok,[Code,_]} ->
+ list_to_integer(Code);
+ {ok,_} ->
+ 200
+ end;
+extract_status_code([_|Rest]) ->
+ extract_status_code(Rest).
+
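For illustration, a few sample header-field lists (made up for this sketch) and the status codes that extract_status_code/1 above derives from them:

    %% Illustration only:
    %%   extract_status_code(["Content-Type: text/html"])        -> 200
    %%   extract_status_code(["Location: http://host/new.html"]) -> 302
    %%   extract_status_code(["Status: 404 Not Found"])          -> 404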
+
+sz(B) when binary(B) -> {binary,size(B)};
+sz(L) when list(L) -> {list,length(L)};
+sz(_) -> undefined.
+
+
+%% Convert error to printable string
+%%
+reason({error,emfile})      -> ": Too many open files";
+reason({error,{enfile,_}}) -> ": File/port table overflow";
+reason({error,enomem}) -> ": Not enough memory";
+reason({error,eagain}) -> ": No more available OS processes";
+reason(_) -> "".
+
+removeStatus(Head)->
+ case httpd_util:split(Head,"Status:.\r\n",2) of
+ {ok,[HeadPart,HeadEnd]}->
+ HeadPart++HeadEnd;
+ _ ->
+ Head
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% There are 2 config directives for mod_cgi: %%
+%% ScriptNoCache true|false, defines whether the server shall add %%
+%% header fields to stop proxies and %%
+%% clients from saving the page in history %%
+%% or cache %%
+%% %%
+%% ScriptTimeout Seconds, The maximum number of seconds that the       %%
+%%                        server will wait for the script to           %%
+%%                        generate a part of the document              %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+load([$S,$c,$r,$i,$p,$t,$N,$o,$C,$a,$c,$h,$e |CacheArg],[])->
+ case catch list_to_atom(httpd_conf:clean(CacheArg)) of
+ true ->
+ {ok, [], {script_nocache,true}};
+ false ->
+ {ok, [], {script_nocache,false}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(CacheArg)++
+ " is an invalid ScriptNoCache directive")}
+ end;
+
+load([$S,$c,$r,$i,$p,$t,$T,$i,$m,$e,$o,$u,$t,$ |Timeout],[])->
+ case catch list_to_integer(httpd_conf:clean(Timeout)) of
+ TimeoutSec when integer(TimeoutSec) ->
+ {ok, [], {script_timeout,TimeoutSec*1000}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(Timeout)++
+ " is an invalid ScriptTimeout")}
+ end.
+
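For illustration, assuming httpd_conf:clean/1 merely strips surrounding whitespace, the two directives handled by load/2 above parse roughly as follows (the sample lines are made up for this sketch):

    %% Illustration only:
    %%   load("ScriptNoCache true", []) -> {ok, [], {script_nocache, true}}
    %%   load("ScriptTimeout 15", [])   -> {ok, [], {script_timeout, 15000}}  %% milliseconds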
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_dir.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_dir.erl
new file mode 100644
index 0000000000..449b088055
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_dir.erl
@@ -0,0 +1,266 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_dir.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_dir).
+-export([do/1]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_dir(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_dir(Info) ->
+ ?DEBUG("do_dir -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ DefaultPath = mod_alias:default_index(Info#mod.config_db,Path),
+ %% Is it a directory?
+ case file:read_file_info(DefaultPath) of
+ {ok,FileInfo} when FileInfo#file_info.type == directory ->
+ DecodedRequestURI =
+ httpd_util:decode_hex(Info#mod.request_uri),
+ ?DEBUG("do_dir -> ~n"
+ " Path: ~p~n"
+ " DefaultPath: ~p~n"
+ " DecodedRequestURI: ~p",
+ [Path,DefaultPath,DecodedRequestURI]),
+ case dir(DefaultPath,string:strip(DecodedRequestURI,right,$/),Info#mod.config_db) of
+ {ok, Dir} ->
+ Head=[{content_type,"text/html"},
+ {content_length,integer_to_list(httpd_util:flatlength(Dir))},
+ {date,httpd_util:rfc1123_date(FileInfo#file_info.mtime)},
+ {code,200}],
+ {proceed,[{response,{response,Head,Dir}},
+ {mime_type,"text/html"}|Info#mod.data]};
+ {error, Reason} ->
+ ?ERROR("do_dir -> dir operation failed: ~p",[Reason]),
+ {proceed,
+ [{status,{404,Info#mod.request_uri,Reason}}|
+ Info#mod.data]}
+ end;
+ {ok,FileInfo} ->
+ ?DEBUG("do_dir -> ~n"
+ " Path: ~p~n"
+ " DefaultPath: ~p~n"
+ " FileInfo: ~p",
+ [Path,DefaultPath,FileInfo]),
+ {proceed,Info#mod.data};
+ {error,Reason} ->
+ ?LOG("do_dir -> failed reading file info (~p) for: ~p",
+ [Reason,DefaultPath]),
+ {proceed,
+ [{status,read_file_info_error(Reason,Info,DefaultPath)}|
+ Info#mod.data]}
+ end.
+
+dir(Path,RequestURI,ConfigDB) ->
+ case file:list_dir(Path) of
+ {ok,FileList} ->
+ SortedFileList=lists:sort(FileList),
+ {ok,[header(Path,RequestURI),
+ body(Path,RequestURI,ConfigDB,SortedFileList),
+ footer(Path,SortedFileList)]};
+ {error,Reason} ->
+ {error,?NICE("Can't open directory "++Path++": "++Reason)}
+ end.
+
+%% header
+
+header(Path,RequestURI) ->
+ Header=
+ "<HTML>\n<HEAD>\n<TITLE>Index of "++RequestURI++"</TITLE>\n</HEAD>\n<BODY>\n<H1>Index of "++
+ RequestURI++"</H1>\n<PRE><IMG SRC=\""++icon(blank)++
+ "\" ALT=" "> Name Last modified Size Description
+<HR>\n",
+ case regexp:sub(RequestURI,"[^/]*\$","") of
+ {ok,"/",_} ->
+ Header;
+ {ok,ParentRequestURI,_} ->
+ {ok,ParentPath,_}=regexp:sub(string:strip(Path,right,$/),"[^/]*\$",""),
+ Header++format(ParentPath,ParentRequestURI)
+ end.
+
+format(Path,RequestURI) ->
+ {ok,FileInfo}=file:read_file_info(Path),
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">Parent directory</A> ~2.2.0w-~s-~w ~2.2.0w:~2.2.0w -\n",
+ [icon(back),"DIR",RequestURI,Day,
+ httpd_util:month(Month),Year,Hour,Minute]).
+
+%% body
+
+body(Path,RequestURI,ConfigDB,[]) ->
+ [];
+body(Path,RequestURI,ConfigDB,[Entry|Rest]) ->
+ [format(Path,RequestURI,ConfigDB,Entry)|body(Path,RequestURI,ConfigDB,Rest)].
+
+format(Path,RequestURI,ConfigDB,Entry) ->
+ case file:read_file_info(Path++"/"++Entry) of
+ {ok,FileInfo} when FileInfo#file_info.type == directory ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ EntryLength=length(Entry),
+ if
+ EntryLength > 21 ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~-21.s..</A>~2.2.0w-~s-~w ~2.2.0w:~2.2.0w -\n",
+ [icon(folder),"DIR",RequestURI++"/"++Entry++"/",Entry,
+ Day,httpd_util:month(Month),Year,Hour,Minute]);
+ true ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~s</A>~*.*c~2.2.0w-~s-~w ~2.2.0w:~2.2.0w -\n",
+ [icon(folder),"DIR",RequestURI++"/"++Entry++"/",Entry,
+ 23-EntryLength,23-EntryLength,$ ,Day,
+ httpd_util:month(Month),Year,Hour,Minute])
+ end;
+ {ok,FileInfo} ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ Suffix=httpd_util:suffix(Entry),
+ MimeType=httpd_util:lookup_mime(ConfigDB,Suffix,""),
+ EntryLength=length(Entry),
+ if
+ EntryLength > 21 ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~-21.s..</A>~2.2.0w-~s-~w ~2.2.0w:~2.2.0w~8wk ~s\n",
+ [icon(Suffix,MimeType),Suffix,RequestURI++"/"++Entry,
+ Entry,Day,httpd_util:month(Month),Year,Hour,Minute,
+ trunc(FileInfo#file_info.size/1024+1),MimeType]);
+ true ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~s</A>~*.*c~2.2.0w-~s-~w ~2.2.0w:~2.2.0w~8wk ~s\n",
+ [icon(Suffix,MimeType),Suffix,RequestURI++"/"++Entry,
+ Entry,23-EntryLength,23-EntryLength,$ ,Day,
+ httpd_util:month(Month),Year,Hour,Minute,
+ trunc(FileInfo#file_info.size/1024+1),MimeType])
+ end;
+ {error,Reason} ->
+ ""
+ end.
+
+%% footer
+
+footer(Path,FileList) ->
+ case lists:member("README",FileList) of
+ true ->
+ {ok,Body}=file:read_file(Path++"/README"),
+ "</PRE>\n<HR>\n<PRE>\n"++binary_to_list(Body)++
+ "\n</PRE>\n</BODY>\n</HTML>\n";
+ false ->
+ "</PRE>\n</BODY>\n</HTML>\n"
+ end.
+
+%%
+%% Icon mappings are hard-wired ala default Apache (Ugly!)
+%%
+
+icon(Suffix,MimeType) ->
+ case icon(Suffix) of
+ undefined ->
+ case MimeType of
+ [$t,$e,$x,$t,$/|_] ->
+ "/icons/text.gif";
+ [$i,$m,$a,$g,$e,$/|_] ->
+ "/icons/image2.gif";
+ [$a,$u,$d,$i,$o,$/|_] ->
+ "/icons/sound2.gif";
+ [$v,$i,$d,$e,$o,$/|_] ->
+ "/icons/movie.gif";
+ _ ->
+ "/icons/unknown.gif"
+ end;
+ Icon ->
+ Icon
+ end.
+
+icon(blank) -> "/icons/blank.gif";
+icon(back) -> "/icons/back.gif";
+icon(folder) -> "/icons/folder.gif";
+icon("bin") -> "/icons/binary.gif";
+icon("exe") -> "/icons/binary.gif";
+icon("hqx") -> "/icons/binhex.gif";
+icon("tar") -> "/icons/tar.gif";
+icon("wrl") -> "/icons/world2.gif";
+icon("wrl.gz") -> "/icons/world2.gif";
+icon("vrml") -> "/icons/world2.gif";
+icon("vrm") -> "/icons/world2.gif";
+icon("iv") -> "/icons/world2.gif";
+icon("Z") -> "/icons/compressed.gif";
+icon("z") -> "/icons/compressed.gif";
+icon("tgz") -> "/icons/compressed.gif";
+icon("gz") -> "/icons/compressed.gif";
+icon("zip") -> "/icons/compressed.gif";
+icon("ps") -> "/icons/a.gif";
+icon("ai") -> "/icons/a.gif";
+icon("eps") -> "/icons/a.gif";
+icon("html") -> "/icons/layout.gif";
+icon("shtml") -> "/icons/layout.gif";
+icon("htm") -> "/icons/layout.gif";
+icon("pdf") -> "/icons/layout.gif";
+icon("txt") -> "/icons/text.gif";
+icon("erl") -> "/icons/burst.gif";
+icon("c") -> "/icons/c.gif";
+icon("pl") -> "/icons/p.gif";
+icon("py") -> "/icons/p.gif";
+icon("for") -> "/icons/f.gif";
+icon("dvi") -> "/icons/dvi.gif";
+icon("uu") -> "/icons/uuencoded.gif";
+icon("conf") -> "/icons/script.gif";
+icon("sh") -> "/icons/script.gif";
+icon("shar") -> "/icons/script.gif";
+icon("csh") -> "/icons/script.gif";
+icon("ksh") -> "/icons/script.gif";
+icon("tcl") -> "/icons/script.gif";
+icon("tex") -> "/icons/tex.gif";
+icon("core") -> "/icons/tex.gif";
+icon(_) -> undefined.
+
+
+read_file_info_error(eacces,Info,Path) ->
+ read_file_info_error(403,Info,Path,
+ ": Missing search permissions for one "
+ "of the parent directories");
+read_file_info_error(enoent,Info,Path) ->
+ read_file_info_error(404,Info,Path,"");
+read_file_info_error(enotdir,Info,Path) ->
+ read_file_info_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+read_file_info_error(_,Info,Path) ->
+ read_file_info_error(500,none,Path,"").
+
+read_file_info_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't access "++Path++Reason)};
+read_file_info_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,
+ ?NICE("Can't access "++Path++Reason)}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_disk_log.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_disk_log.erl
new file mode 100644
index 0000000000..c5d110ee4b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_disk_log.erl
@@ -0,0 +1,405 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_disk_log.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_disk_log).
+-export([do/1,error_log/5,security_log/2,load/2,store/2,remove/1]).
+
+-export([report_error/2]).
+
+-define(VMODULE,"DISK_LOG").
+-include("httpd_verbosity.hrl").
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ AuthUser = auth_user(Info#mod.data),
+ Date = custom_date(),
+ log_internal_info(Info,Date,Info#mod.data),
+ LogFormat = get_log_format(Info#mod.config_db),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ transfer_log(Info, "-", AuthUser, Date, StatusCode, 0, LogFormat),
+ if
+ StatusCode >= 400 ->
+ error_log(Info, Date, Reason, LogFormat);
+ true ->
+ not_an_error
+ end,
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ {already_sent,StatusCode,Size} ->
+ transfer_log(Info, "-", AuthUser, Date, StatusCode,
+ Size, LogFormat),
+ {proceed,Info#mod.data};
+
+ {response, Head, Body} ->
+ Size = httpd_util:key1search(Head, content_length, 0),
+ Code = httpd_util:key1search(Head, code, 200),
+ transfer_log(Info, "-", AuthUser, Date, Code,
+ Size, LogFormat),
+ {proceed,Info#mod.data};
+
+ {StatusCode,Response} ->
+ transfer_log(Info, "-", AuthUser, Date, 200,
+ httpd_util:flatlength(Response), LogFormat),
+ {proceed,Info#mod.data};
+ undefined ->
+ transfer_log(Info, "-", AuthUser, Date, 200,
+ 0, LogFormat),
+ {proceed,Info#mod.data}
+ end
+ end.
+
+custom_date() ->
+ LocalTime = calendar:local_time(),
+ UniversalTime = calendar:universal_time(),
+ Minutes = round(diff_in_minutes(LocalTime,UniversalTime)),
+ {{YYYY,MM,DD},{Hour,Min,Sec}} = LocalTime,
+ Date =
+ io_lib:format("~.2.0w/~.3s/~.4w:~.2.0w:~.2.0w:~.2.0w ~c~.2.0w~.2.0w",
+ [DD,httpd_util:month(MM),YYYY,Hour,Min,Sec,sign(Minutes),
+ abs(Minutes) div 60,abs(Minutes) rem 60]),
+ lists:flatten(Date).
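custom_date/0 above yields a Common Log Format style timestamp with the local offset from UTC; a made-up example of its output:

    %% Illustration only:
    %%   custom_date() -> "09/Mar/2011:13:29:48 +0100"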
+
+diff_in_minutes(L,U) ->
+ (calendar:datetime_to_gregorian_seconds(L) -
+ calendar:datetime_to_gregorian_seconds(U))/60.
+
+sign(Minutes) when Minutes > 0 ->
+ $+;
+sign(Minutes) ->
+ $-.
+
+auth_user(Data) ->
+ case httpd_util:key1search(Data,remote_user) of
+ undefined ->
+ "-";
+ RemoteUser ->
+ RemoteUser
+ end.
+
+%% log_internal_info
+
+log_internal_info(Info,Date,[]) ->
+ ok;
+log_internal_info(Info,Date,[{internal_info,Reason}|Rest]) ->
+ Format = get_log_format(Info#mod.config_db),
+ error_log(Info,Date,Reason,Format),
+ log_internal_info(Info,Date,Rest);
+log_internal_info(Info,Date,[_|Rest]) ->
+ log_internal_info(Info,Date,Rest).
+
+
+%% transfer_log
+
+transfer_log(Info,RFC931,AuthUser,Date,StatusCode,Bytes,Format) ->
+ case httpd_util:lookup(Info#mod.config_db,transfer_disk_log) of
+ undefined ->
+ no_transfer_log;
+ TransferDiskLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ Entry = io_lib:format("~s ~s ~s [~s] \"~s\" ~w ~w~n",
+ [RemoteHost,RFC931,AuthUser,Date,
+ Info#mod.request_line,StatusCode,Bytes]),
+ write(TransferDiskLog, Entry, Format)
+ end.
+
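The Entry written by transfer_log/7 above is a Common Log Format line; a made-up example of what one entry might look like:

    %% Illustration only:
    %%   192.168.0.1 - - [09/Mar/2011:13:29:48 +0100] "GET /index.html HTTP/1.1" 200 1024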
+
+%% error_log
+
+error_log(Info, Date, Reason, Format) ->
+ Format=get_log_format(Info#mod.config_db),
+ case httpd_util:lookup(Info#mod.config_db,error_disk_log) of
+ undefined ->
+ no_error_log;
+ ErrorDiskLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ Entry =
+ io_lib:format("[~s] access to ~s failed for ~s, reason: ~p~n",
+ [Date, Info#mod.request_uri,
+ RemoteHost, Reason]),
+ write(ErrorDiskLog, Entry, Format)
+ end.
+
+error_log(SocketType, Socket, ConfigDB, {PortNumber, RemoteHost}, Reason) ->
+ Format = get_log_format(ConfigDB),
+ case httpd_util:lookup(ConfigDB,error_disk_log) of
+ undefined ->
+ no_error_log;
+ ErrorDiskLog ->
+ Date = custom_date(),
+ Entry =
+ io_lib:format("[~s] server crash for ~s, reason: ~p~n",
+ [Date,RemoteHost,Reason]),
+ write(ErrorDiskLog, Entry, Format),
+ ok
+ end.
+
+
+%% security_log
+
+security_log(ConfigDB, Event) ->
+ Format = get_log_format(ConfigDB),
+ case httpd_util:lookup(ConfigDB,security_disk_log) of
+ undefined ->
+ no_error_log;
+ DiskLog ->
+ Date = custom_date(),
+ Entry = io_lib:format("[~s] ~s ~n", [Date, Event]),
+ write(DiskLog, Entry, Format),
+ ok
+ end.
+
+report_error(ConfigDB, Error) ->
+ Format = get_log_format(ConfigDB),
+ case httpd_util:lookup(ConfigDB, error_disk_log) of
+ undefined ->
+ no_error_log;
+ ErrorDiskLog ->
+ Date = custom_date(),
+ Entry = io_lib:format("[~s] reporting error: ~s",[Date,Error]),
+ write(ErrorDiskLog, Entry, Format),
+ ok
+ end.
+
+%%----------------------------------------------------------------------
+%% Get the current format of the disklog
+%%----------------------------------------------------------------------
+get_log_format(ConfigDB)->
+ httpd_util:lookup(ConfigDB,disk_log_format,external).
+
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$T,$r,$a,$n,$s,$f,$e,$r,$D,$i,$s,$k,$L,$o,$g,$S,$i,$z,$e,$ |
+ TransferDiskLogSize],[]) ->
+ case regexp:split(TransferDiskLogSize," ") of
+ {ok,[MaxBytes,MaxFiles]} ->
+ case httpd_conf:make_integer(MaxBytes) of
+ {ok,MaxBytesInteger} ->
+ case httpd_conf:make_integer(MaxFiles) of
+ {ok,MaxFilesInteger} ->
+ {ok,[],{transfer_disk_log_size,
+ {MaxBytesInteger,MaxFilesInteger}}};
+ {error,_} ->
+ {error,
+ ?NICE(httpd_conf:clean(TransferDiskLogSize)++
+ " is an invalid TransferDiskLogSize")}
+ end;
+ {error,_} ->
+ {error,?NICE(httpd_conf:clean(TransferDiskLogSize)++
+ " is an invalid TransferDiskLogSize")}
+ end
+ end;
+load([$T,$r,$a,$n,$s,$f,$e,$r,$D,$i,$s,$k,$L,$o,$g,$ |TransferDiskLog],[]) ->
+ {ok,[],{transfer_disk_log,httpd_conf:clean(TransferDiskLog)}};
+
+load([$E,$r,$r,$o,$r,$D,$i,$s,$k,$L,$o,$g,$S,$i,$z,$e,$ | ErrorDiskLogSize],[]) ->
+ case regexp:split(ErrorDiskLogSize," ") of
+ {ok,[MaxBytes,MaxFiles]} ->
+ case httpd_conf:make_integer(MaxBytes) of
+ {ok,MaxBytesInteger} ->
+ case httpd_conf:make_integer(MaxFiles) of
+ {ok,MaxFilesInteger} ->
+ {ok,[],{error_disk_log_size,
+ {MaxBytesInteger,MaxFilesInteger}}};
+ {error,_} ->
+ {error,?NICE(httpd_conf:clean(ErrorDiskLogSize)++
+ " is an invalid ErrorDiskLogSize")}
+ end;
+ {error,_} ->
+ {error,?NICE(httpd_conf:clean(ErrorDiskLogSize)++
+ " is an invalid ErrorDiskLogSize")}
+ end
+ end;
+load([$E,$r,$r,$o,$r,$D,$i,$s,$k,$L,$o,$g,$ |ErrorDiskLog],[]) ->
+ {ok, [], {error_disk_log, httpd_conf:clean(ErrorDiskLog)}};
+
+load([$S,$e,$c,$u,$r,$i,$t,$y,$D,$i,$s,$k,$L,$o,$g,$S,$i,$z,$e,$ |SecurityDiskLogSize],[]) ->
+ case regexp:split(SecurityDiskLogSize, " ") of
+ {ok, [MaxBytes, MaxFiles]} ->
+ case httpd_conf:make_integer(MaxBytes) of
+ {ok, MaxBytesInteger} ->
+ case httpd_conf:make_integer(MaxFiles) of
+ {ok, MaxFilesInteger} ->
+ {ok, [], {security_disk_log_size,
+ {MaxBytesInteger, MaxFilesInteger}}};
+ {error,_} ->
+ {error, ?NICE(httpd_conf:clean(SecurityDiskLogSize)++
+ " is an invalid SecurityDiskLogSize")}
+ end;
+ {error, _} ->
+ {error, ?NICE(httpd_conf:clean(SecurityDiskLogSize)++
+ " is an invalid SecurityDiskLogSize")}
+ end
+ end;
+load([$S,$e,$c,$u,$r,$i,$t,$y,$D,$i,$s,$k,$L,$o,$g,$ |SecurityDiskLog],[]) ->
+ {ok, [], {security_disk_log, httpd_conf:clean(SecurityDiskLog)}};
+
+load([$D,$i,$s,$k,$L,$o,$g,$F,$o,$r,$m,$a,$t,$ |Format],[]) ->
+ case httpd_conf:clean(Format) of
+ "internal" ->
+ {ok, [], {disk_log_format,internal}};
+ "external" ->
+ {ok, [], {disk_log_format,external}};
+ _Default ->
+ {ok, [], {disk_log_format,external}}
+ end.
+
+%% store
+
+store({transfer_disk_log,TransferDiskLog},ConfigList) ->
+ case create_disk_log(TransferDiskLog, transfer_disk_log_size, ConfigList) of
+ {ok,TransferDB} ->
+ {ok,{transfer_disk_log,TransferDB}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({security_disk_log,SecurityDiskLog},ConfigList) ->
+ case create_disk_log(SecurityDiskLog, security_disk_log_size, ConfigList) of
+ {ok,SecurityDB} ->
+ {ok,{security_disk_log,SecurityDB}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({error_disk_log,ErrorDiskLog},ConfigList) ->
+ case create_disk_log(ErrorDiskLog, error_disk_log_size, ConfigList) of
+ {ok,ErrorDB} ->
+ {ok,{error_disk_log,ErrorDB}};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+
+%%----------------------------------------------------------------------
+%% Opens or creates the disk logs
+%%----------------------------------------------------------------------
+log_size(ConfigList, Tag) ->
+ httpd_util:key1search(ConfigList, Tag, {500*1024,8}).
+
+create_disk_log(LogFile, SizeTag, ConfigList) ->
+ Filename = httpd_conf:clean(LogFile),
+ {MaxBytes, MaxFiles} = log_size(ConfigList, SizeTag),
+ case filename:pathtype(Filename) of
+ absolute ->
+ create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList);
+ volumerelative ->
+ create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList);
+ relative ->
+ case httpd_util:key1search(ConfigList,server_root) of
+ undefined ->
+ {error,
+ ?NICE(Filename++
+			 " is an invalid ErrorLog because ServerRoot is not defined")};
+ ServerRoot ->
+ AbsoluteFilename = filename:join(ServerRoot,Filename),
+ create_disk_log(AbsoluteFilename, MaxBytes, MaxFiles,
+ ConfigList)
+ end
+ end.
+
+create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList) ->
+ Format = httpd_util:key1search(ConfigList, disk_log_format, external),
+ open(Filename, MaxBytes, MaxFiles, Format).
+
+
+
+%% remove
+remove(ConfigDB) ->
+ lists:foreach(fun([DiskLog]) -> close(DiskLog) end,
+ ets:match(ConfigDB,{transfer_disk_log,'$1'})),
+ lists:foreach(fun([DiskLog]) -> close(DiskLog) end,
+ ets:match(ConfigDB,{error_disk_log,'$1'})),
+ ok.
+
+
+%%
+%% Some disk_log wrapper functions:
+%%
+
+%%----------------------------------------------------------------------
+%% Function: open/4
+%% Description: Open a disk log file.
+%% Control which format the disk log will be in. The external file
+%% format is used as default since that format was used by older
+%% implementations of inets.
+%%
+%% When the internal disk log format is used, we will do some extra
+%% checks. If the files are damaged, we try to repair them, and if
+%% that is not possible, we truncate them.
+%%----------------------------------------------------------------------
+
+open(Filename, MaxBytes, MaxFiles, internal) ->
+ Opts = [{format, internal}, {repair, truncate}],
+ open1(Filename, MaxBytes, MaxFiles, Opts);
+open(Filename, MaxBytes, MaxFiles, _) ->
+ Opts = [{format, external}],
+ open1(Filename, MaxBytes, MaxFiles, Opts).
+
+open1(Filename, MaxBytes, MaxFiles, Opts0) ->
+ Opts1 = [{name, Filename}, {file, Filename}, {type, wrap}] ++ Opts0,
+ case open2(Opts1, {MaxBytes, MaxFiles}) of
+ {ok, LogDB} ->
+ {ok, LogDB};
+ {error, Reason} ->
+ ?vlog("failed opening disk log with args:"
+ "~n Filename: ~p"
+ "~n MaxBytes: ~p"
+ "~n MaxFiles: ~p"
+ "~n Opts0: ~p"
+ "~nfor reason:"
+ "~n ~p", [Filename, MaxBytes, MaxFiles, Opts0, Reason]),
+ {error,
+ ?NICE("Can't create " ++ Filename ++
+ lists:flatten(io_lib:format(", ~p",[Reason])))};
+ _ ->
+ {error, ?NICE("Can't create "++Filename)}
+ end.
+
+open2(Opts, Size) ->
+ case disk_log:open(Opts) of
+ {error, {badarg, size}} ->
+ %% File did not exist, add the size option and try again
+ disk_log:open([{size, Size} | Opts]);
+ Else ->
+ Else
+ end.
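For reference, a minimal sketch (not part of the module; the file name and sizes are made up) of opening an external wrap log with the standard disk_log API and writing one entry the way write/3 below does:

    %% Illustration only; assumes the current directory is writable.
    open_example() ->
        Opts = [{name, "transfer.log"}, {file, "transfer.log"},
                {type, wrap}, {format, external}, {size, {500*1024, 8}}],
        {ok, Log} = disk_log:open(Opts),
        ok = disk_log:blog(Log, "one external (byte-oriented) log entry\n"),
        disk_log:close(Log).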
+
+
+%%----------------------------------------------------------------------
+%% Actually writes the entry to the disk_log. If the log is an
+%% internal disk_log, write it with log/2, otherwise with blog/2.
+%%----------------------------------------------------------------------
+write(Log, Entry, internal) ->
+ disk_log:log(Log, Entry);
+
+write(Log, Entry, _) ->
+ disk_log:blog(Log, Entry).
+
+%% Close the log file
+close(Log) ->
+ disk_log:close(Log).
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_esi.erl
new file mode 100644
index 0000000000..d527f36788
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_esi.erl
@@ -0,0 +1,490 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_esi.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_esi).
+-export([do/1,load/2]).
+
+%% Functions provided to help the erl scheme alias programmer
+%% create dynamic webpages that are sent back to the user during
+%% generation
+-export([deliver/2]).
+
+
+-include("httpd.hrl").
+
+-define(VMODULE,"ESI").
+-include("httpd_verbosity.hrl").
+
+-define(GATEWAY_INTERFACE,"CGI/1.1").
+-define(DEFAULT_ERL_TIMEOUT,15000).
+%% do
+
+do(Info) ->
+ ?vtrace("do",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ case erl_or_eval(Info#mod.request_uri,
+ Info#mod.config_db) of
+ {eval,CGIBody,Modules} ->
+ eval(Info,Info#mod.method,CGIBody,Modules);
+ {erl,CGIBody,Modules} ->
+ erl(Info,Info#mod.method,CGIBody,Modules);
+ proceed ->
+ {proceed,Info#mod.data}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+
+
+%% erl_or_eval
+
+erl_or_eval(RequestURI, ConfigDB) ->
+ case erlp(RequestURI, ConfigDB) of
+ false ->
+ case evalp(RequestURI, ConfigDB) of
+ false ->
+ ?vtrace("neither erl nor eval",[]),
+ proceed;
+ Other ->
+ Other
+ end;
+ Other ->
+ Other
+ end.
+
+erlp(RequestURI, ConfigDB) ->
+ case httpd_util:multi_lookup(ConfigDB, erl_script_alias) of
+ [] ->
+ false;
+ AliasMods ->
+ erlp_find_alias(RequestURI,AliasMods)
+ end.
+
+erlp_find_alias(_RequestURI,[]) ->
+ ?vtrace("erlp_find_alias -> no match",[]),
+ false;
+erlp_find_alias(RequestURI,[{Alias,Modules}|Rest]) ->
+ case regexp:first_match(RequestURI,"^"++Alias++"/") of
+ {match,1,Length} ->
+ ?vtrace("erlp -> match with Length: ~p",[Length]),
+ {erl,string:substr(RequestURI,Length+1),Modules};
+ nomatch ->
+ erlp_find_alias(RequestURI,Rest)
+ end.
+
+evalp(RequestURI, ConfigDB) ->
+ case httpd_util:multi_lookup(ConfigDB, eval_script_alias) of
+ [] ->
+ false;
+ AliasMods ->
+ evalp_find_alias(RequestURI,AliasMods)
+ end.
+
+evalp_find_alias(_RequestURI,[]) ->
+ ?vtrace("evalp_find_alias -> no match",[]),
+ false;
+evalp_find_alias(RequestURI,[{Alias,Modules}|Rest]) ->
+ case regexp:first_match(RequestURI,"^"++Alias++"\\?") of
+ {match, 1, Length} ->
+ ?vtrace("evalp_find_alias -> match with Length: ~p",[Length]),
+ {eval, string:substr(RequestURI,Length+1),Modules};
+ nomatch ->
+ evalp_find_alias(RequestURI,Rest)
+ end.
+
+
+%%
+%% Erl mechanism
+%%
+
+%% This is exactly the same as the GET method; the difference is that
+%% the response must not contain any data except the response header
+
+
+erl(Info,"HEAD",CGIBody,Modules) ->
+ erl(Info,"GET",CGIBody,Modules);
+
+erl(Info,"GET",CGIBody,Modules) ->
+ ?vtrace("erl GET request",[]),
+ case httpd_util:split(CGIBody,":|%3A|/",2) of
+ {ok, [Mod,FuncAndInput]} ->
+ ?vtrace("~n Mod: ~p"
+ "~n FuncAndInput: ~p",[Mod,FuncAndInput]),
+ case httpd_util:split(FuncAndInput,"[\?/]",2) of
+ {ok, [Func,Input]} ->
+ ?vtrace("~n Func: ~p"
+ "~n Input: ~p",[Func,Input]),
+ exec(Info,"GET",CGIBody,Modules,Mod,Func,
+ {input_type(FuncAndInput),Input});
+ {ok, [Func]} ->
+ exec(Info,"GET",CGIBody,Modules,Mod,Func,{no_input,""});
+ {ok, BadRequest} ->
+ {proceed,[{status,{400,none,BadRequest}}|Info#mod.data]}
+ end;
+ {ok, BadRequest} ->
+ ?vlog("erl BAD (GET-) request",[]),
+ {proceed, [{status,{400,none,BadRequest}}|Info#mod.data]}
+ end;
+
+erl(Info, "POST", CGIBody, Modules) ->
+ ?vtrace("erl POST request",[]),
+ case httpd_util:split(CGIBody,":|%3A|/",2) of
+ {ok,[Mod,Func]} ->
+ ?vtrace("~n Mod: ~p"
+ "~n Func: ~p",[Mod,Func]),
+ exec(Info,"POST",CGIBody,Modules,Mod,Func,
+ {entity_body,Info#mod.entity_body});
+ {ok,BadRequest} ->
+ ?vlog("erl BAD (POST-) request",[]),
+ {proceed,[{status,{400,none,BadRequest}}|Info#mod.data]}
+ end.
+
+input_type([]) ->
+ no_input;
+input_type([$/|Rest]) ->
+ path_info;
+input_type([$?|Rest]) ->
+ query_string;
+input_type([First|Rest]) ->
+ input_type(Rest).
+
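To illustrate the dissection performed by erl/4 and input_type/1 above, a CGIBody of "httpd_example:get?name=foo" (module, function and query made up for this sketch) is taken apart roughly as:

    %% Illustration only:
    %%   CGIBody      = "httpd_example:get?name=foo"
    %%   Mod          = "httpd_example",   FuncAndInput = "get?name=foo"
    %%   Func         = "get",             Input        = "name=foo"
    %%   input_type("get?name=foo") -> query_string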
+
+%% exec
+
+exec(Info,Method,CGIBody,["all"],Mod,Func,{Type,Input}) ->
+ ?vtrace("exec ~s 'all'",[Method]),
+ exec(Info,Method,CGIBody,[Mod],Mod,Func,{Type,Input});
+exec(Info,Method,CGIBody,Modules,Mod,Func,{Type,Input}) ->
+ ?vtrace("exec ~s request with:"
+ "~n Modules: ~p"
+ "~n Mod: ~p"
+ "~n Func: ~p"
+ "~n Type: ~p"
+ "~n Input: ~p",
+ [Method,Modules,Mod,Func,Type,Input]),
+ case lists:member(Mod,Modules) of
+ true ->
+ {_,RemoteAddr}=(Info#mod.init_data)#init_data.peername,
+ ServerName=(Info#mod.init_data)#init_data.resolve,
+ Env=get_environment(Info,ServerName,Method,RemoteAddr,Type,Input),
+ ?vtrace("and now call the module",[]),
+ case try_new_erl_scheme_method(Info,Env,Input,list_to_atom(Mod),list_to_atom(Func)) of
+ {error,not_new_method}->
+ case catch apply(list_to_atom(Mod),list_to_atom(Func),[Env,Input]) of
+ {'EXIT',Reason} ->
+ ?vlog("exit with Reason: ~p",[Reason]),
+ {proceed,[{status,{500,none,Reason}}|Info#mod.data]};
+ Response ->
+ control_response_header(Info,Mod,Func,Response)
+ end;
+ ResponseResult->
+ ResponseResult
+ end;
+ false ->
+ ?vlog("unknown module",[]),
+ {proceed,[{status,{403,Info#mod.request_uri,
+ ?NICE("Client not authorized to evaluate: "++CGIBody)}}|Info#mod.data]}
+ end.
+
+control_response_header(Info,Mod,Func,Response)->
+ case control_response(Response,Info,Mod,Func) of
+ {proceed,[{response,{StatusCode,Response}}|Rest]} ->
+ case httpd_util:lookup(Info#mod.config_db,erl_script_nocache,false) of
+ true ->
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok,[Head,Body]}->
+ Date=httpd_util:rfc1123_date(),
+ Cache="Cache-Control:no-cache\r\nPragma:no-cache\r\nExpires:"++ Date ++ "\r\n",
+ {proceed,[{response,{StatusCode,[Head,"\r\n",Cache,"\r\n",Body]}}|Rest]};
+ _->
+ {proceed,[{response,{StatusCode,Response}}|Rest]}
+ end;
+ WhatEver->
+ {proceed,[{response,{StatusCode,Response}}|Rest]}
+ end;
+ WhatEver->
+ WhatEver
+ end.
+
+control_response(Response,Info,Mod,Func)->
+ ?vdebug("Response: ~n~p",[Response]),
+ case mod_cgi:status_code(lists:flatten(Response)) of
+ {ok,StatusCode} ->
+ {proceed,[{response,{StatusCode,Response}}|Info#mod.data]};
+ {error,Reason} ->
+ {proceed,
+ [{status,{400,none,
+ ?NICE("Error in "++Mod++":"++Func++"/2: "++
+ lists:flatten(io_lib:format("~p",[Reason])))}}|
+ Info#mod.data]}
+ end.
+
+parsed_header([]) ->
+ [];
+parsed_header([{Name,[Value|R1]}|R2]) when list(Value) ->
+ NewName=lists:map(fun(X) -> if X == $- -> $_; true -> X end end,Name),
+ [{list_to_atom("http_"++httpd_util:to_lower(NewName)),
+ multi_value([Value|R1])}|parsed_header(R2)];
+parsed_header([{Name,Value}|Rest]) when list(Value)->
+ {ok,NewName,_}=regexp:gsub(Name,"-","_"),
+ [{list_to_atom("http_"++httpd_util:to_lower(NewName)),Value}|
+ parsed_header(Rest)].
+
+multi_value([]) ->
+ [];
+multi_value([Value]) ->
+ Value;
+multi_value([Value|Rest]) ->
+ Value++", "++multi_value(Rest).
+
+%%
+%% Eval mechanism
+%%
+
+
+eval(Info,"POST",CGIBody,Modules) ->
+ ?vtrace("eval(POST) -> method not supported",[]),
+ {proceed,[{status,{501,{"POST",Info#mod.request_uri,Info#mod.http_version},
+ ?NICE("Eval mechanism doesn't support method POST")}}|
+ Info#mod.data]};
+
+eval(Info,"HEAD",CGIBody,Modules) ->
+    %% The function that sends the data in httpd_response handles a HEAD
+    %% request by not sending the body
+ eval(Info,"GET",CGIBody,Modules);
+
+
+eval(Info,"GET",CGIBody,Modules) ->
+ ?vtrace("eval(GET) -> entry when"
+ "~n Modules: ~p",[Modules]),
+ case auth(CGIBody,Modules) of
+ true ->
+ case lib:eval_str(string:concat(CGIBody,". ")) of
+ {error,Reason} ->
+ ?vlog("eval -> error:"
+ "~n Reason: ~p",[Reason]),
+ {proceed,[{status,{500,none,Reason}}|Info#mod.data]};
+ {ok,Response} ->
+ ?vtrace("eval -> ok:"
+ "~n Response: ~p",[Response]),
+ case mod_cgi:status_code(lists:flatten(Response)) of
+ {ok,StatusCode} ->
+ {proceed,[{response,{StatusCode,Response}}|Info#mod.data]};
+ {error,Reason} ->
+ {proceed,[{status,{400,none,Reason}}|Info#mod.data]}
+ end
+ end;
+ false ->
+ ?vlog("eval -> auth failed",[]),
+ {proceed,[{status,
+ {403,Info#mod.request_uri,
+ ?NICE("Client not authorized to evaluate: "++CGIBody)}}|
+ Info#mod.data]}
+ end.
+
+auth(CGIBody,["all"]) ->
+ true;
+auth(CGIBody,Modules) ->
+ case regexp:match(CGIBody,"^[^\:(%3A)]*") of
+ {match,Start,Length} ->
+ lists:member(string:substr(CGIBody,Start,Length),Modules);
+ nomatch ->
+ false
+ end.
+
+%%----------------------------------------------------------------------
+%% Creates the environment list that will be the first argument to the
+%% functions that are called through the ErlScript scheme
+%%----------------------------------------------------------------------
+
+get_environment(Info,ServerName,Method,RemoteAddr,Type,Input)->
+ Env=[{server_software,?SERVER_SOFTWARE},
+ {server_name,ServerName},
+ {gateway_interface,?GATEWAY_INTERFACE},
+ {server_protocol,?SERVER_PROTOCOL},
+ {server_port,httpd_util:lookup(Info#mod.config_db,port,80)},
+ {request_method,Method},
+ {remote_addr,RemoteAddr},
+ {script_name,Info#mod.request_uri}|
+ parsed_header(Info#mod.parsed_header)],
+ get_environment(Type,Input,Env,Info).
+
+
+get_environment(Type,Input,Env,Info)->
+ Env1=case Type of
+ query_string ->
+ [{query_string,Input}|Env];
+ path_info ->
+ Aliases=httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_,PathTranslated,_}=mod_alias:real_name(Info#mod.config_db,[$/|Input],Aliases),
+ [{path_info,"/"++httpd_util:decode_hex(Input)},
+ {path_translated,PathTranslated}|Env];
+ entity_body ->
+ [{content_length,httpd_util:flatlength(Input)}|Env];
+ no_input ->
+ Env
+ end,
+ get_environment(Info,Env1).
+
+get_environment(Info,Env)->
+ case httpd_util:key1search(Info#mod.data,remote_user) of
+ undefined ->
+ Env;
+ RemoteUser ->
+ [{remote_user,RemoteUser}|Env]
+ end.
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$E,$r,$l,$S,$c,$r,$i,$p,$t,$A,$l,$i,$a,$s,$ |ErlScriptAlias],[]) ->
+ case regexp:split(ErlScriptAlias," ") of
+ {ok, [ErlName|Modules]} ->
+ {ok, [], {erl_script_alias, {ErlName,Modules}}};
+ {ok, _} ->
+ {error,?NICE(httpd_conf:clean(ErlScriptAlias)++
+ " is an invalid ErlScriptAlias")}
+ end;
+load([$E,$v,$a,$l,$S,$c,$r,$i,$p,$t,$A,$l,$i,$a,$s,$ |EvalScriptAlias],[]) ->
+ case regexp:split(EvalScriptAlias, " ") of
+ {ok, [EvalName|Modules]} ->
+ {ok, [], {eval_script_alias, {EvalName,Modules}}};
+ {ok, _} ->
+ {error, ?NICE(httpd_conf:clean(EvalScriptAlias)++
+ " is an invalid EvalScriptAlias")}
+ end;
+load([$E,$r,$l,$S,$c,$r,$i,$p,$t,$T,$i,$m,$e,$o,$u,$t,$ |Timeout],[])->
+ case catch list_to_integer(httpd_conf:clean(Timeout)) of
+ TimeoutSec when integer(TimeoutSec) ->
+ {ok, [], {erl_script_timeout,TimeoutSec*1000}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(Timeout)++
+ " is an invalid ErlScriptTimeout")}
+ end;
+load([$E,$r,$l,$S,$c,$r,$i,$p,$t,$N,$o,$C,$a,$c,$h,$e |CacheArg],[])->
+ case catch list_to_atom(httpd_conf:clean(CacheArg)) of
+ true ->
+ {ok, [], {erl_script_nocache,true}};
+ false ->
+ {ok, [], {erl_script_nocache,false}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(CacheArg)++
+ " is an invalid ErlScriptNoCache directive")}
+ end.
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions below handle the data from the dynamic webpages       %%
+%% that send data back to the user part by part                        %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%----------------------------------------------------------------------
+%% deliver/2 is the callback function users can call to deliver data back
+%% to the client
+%%----------------------------------------------------------------------
+
+deliver(SessionID,Data)when pid(SessionID) ->
+ SessionID ! {ok,Data},
+ ok;
+deliver(SessionID,Data) ->
+ {error,bad_sessionID}.
+
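For illustration, a minimal ESI callback that uses deliver/2 above to push a page back in parts. The module and function names are made up; the callback is spawned with the session pid, the environment and the input as its three arguments, as try_new_erl_scheme_method/5 below shows:

    -module(my_esi_example).
    -export([hello/3]).

    %% Illustration only: invoked as my_esi_example:hello(SessionID, Env, Input).
    hello(SessionID, _Env, _Input) ->
        mod_esi:deliver(SessionID, "Content-Type: text/html\r\n\r\n"),
        mod_esi:deliver(SessionID, "<html><body>Hello!</body></html>").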
+
+%%----------------------------------------------------------------------
+%% The method that tries to execute the new format
+%%----------------------------------------------------------------------
+
+%% It would be nicer to use erlang:function_exported/3, but if the
+%% module isn't loaded that function reports the function as not exported
+
+
+try_new_erl_scheme_method(Info,Env,Input,Mod,Func)->
+ process_flag(trap_exit,true),
+ Pid=spawn_link(Mod,Func,[self(),Env,Input]),
+ Timeout=httpd_util:lookup(Info#mod.config_db,erl_script_timeout,?DEFAULT_ERL_TIMEOUT),
+ RetVal=receive_response_data(Info,Pid,0,undefined,[],Timeout),
+ process_flag(trap_exit,false),
+ RetVal.
+
+
+%%----------------------------------------------------------------------
+%% The function receives the data from the process that generates the page
+%% and sends the data to the client through the mod_cgi:send function
+%%----------------------------------------------------------------------
+
+receive_response_data(Info,Pid,Size,StatusCode,AccResponse,Timeout) ->
+ ?DEBUG("receive_response_data()-> Script Size: ~p,StatusCode ~p ,Timeout: ~p ~n",[Size,StatusCode,Timeout]),
+ receive
+ {ok, Response} ->
+ NewStatusCode=mod_cgi:update_status_code(StatusCode,Response),
+
+ ?DEBUG("receive_response_data/2 NewStatusCode: ~p~n",[NewStatusCode]),
+ case mod_cgi:send(Info, NewStatusCode,Response, Size,AccResponse) of
+ socket_closed ->
+ (catch exit(Pid,final)),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]};
+ head_sent->
+ (catch exit(Pid,final)),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]};
+ _ ->
+		    %% The data is sent and the socket is not closed, continue
+ NewSize = mod_cgi:get_new_size(Size,Response),
+ receive_response_data(Info,Pid,NewSize,NewStatusCode,"notempty",Timeout)
+ end;
+ {'EXIT', Pid, Reason} when AccResponse==[] ->
+ {error,not_new_method};
+ {'EXIT', Pid, Reason} when pid(Pid) ->
+ NewStatusCode=mod_cgi:update_status_code(StatusCode,AccResponse),
+ mod_cgi:final_send(Info,NewStatusCode,Size,AccResponse),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]};
+ %% This should not happen!
+ WhatEver ->
+ NewStatusCode=mod_cgi:update_status_code(StatusCode,AccResponse),
+ mod_cgi:final_send(Info,StatusCode,Size,AccResponse),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]}
+ after
+ Timeout ->
+ (catch exit(Pid,timeout)), % KILL the port !!!!
+ httpd_socket:close(Info#mod.socket_type,Info#mod.socket),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]}
+ end.
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_get.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_get.erl
new file mode 100644
index 0000000000..02f708f85b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_get.erl
@@ -0,0 +1,179 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_get.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_get).
+-export([do/1]).
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_get(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+
+do_get(Info) ->
+ ?DEBUG("do_get -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri),
+ {FileInfo, LastModified} =get_modification_date(Path),
+
+ send_response(Info#mod.socket,Info#mod.socket_type,Path,Info,FileInfo,LastModified).
+
+
+%%The common case when no range is specified
+send_response(Socket,SocketType,Path,Info,FileInfo,LastModified)->
+ %% Send the file!
+ %% Find the modification date of the file
+ case file:open(Path,[raw,binary]) of
+ {ok, FileDescriptor} ->
+ ?DEBUG("do_get -> FileDescriptor: ~p",[FileDescriptor]),
+ Suffix = httpd_util:suffix(Path),
+ MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,
+ Suffix,"text/plain"),
+ %FileInfo=file:read_file_info(Path),
+ Date = httpd_util:rfc1123_date(),
+ Size = integer_to_list(FileInfo#file_info.size),
+ Header=case Info#mod.http_version of
+ "HTTP/1.1" ->
+ [httpd_util:header(200, MimeType, Info#mod.connection),
+ "Last-Modified: ", LastModified, "\r\n",
+ "Etag: ",httpd_util:create_etag(FileInfo),"\r\n",
+ "Content-Length: ",Size,"\r\n\r\n"];
+ "HTTP/1.0" ->
+ [httpd_util:header(200, MimeType, Info#mod.connection),
+ "Last-Modified: ", LastModified, "\r\n",
+ "Content-Length: ",Size,"\r\n\r\n"]
+ end,
+
+ send(Info#mod.socket_type, Info#mod.socket,
+ Header, FileDescriptor),
+ file:close(FileDescriptor),
+ {proceed,[{response,{already_sent,200,
+ FileInfo#file_info.size}},
+ {mime_type,MimeType}|Info#mod.data]};
+ {error, Reason} ->
+
+ {proceed,
+ [{status,open_error(Reason,Info,Path)}|Info#mod.data]}
+ end.
+
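For reference, the HTTP/1.1 header assembled in send_response/6 above looks roughly like the following; the status line and the leading fields come from httpd_util:header/3, and all concrete values are made up for this sketch:

    %% Illustration only:
    %%   HTTP/1.1 200 OK
    %%   ...fields emitted by httpd_util:header/3 (Server, Date, Content-Type, ...)
    %%   Last-Modified: Wed, 09 Mar 2011 12:29:48 GMT
    %%   Etag: <value from httpd_util:create_etag/1>
    %%   Content-Length: 1024
    %%   <blank line terminating the header>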
+%% send
+
+send(SocketType,Socket,Header,FileDescriptor) ->
+ ?DEBUG("send -> send header",[]),
+ case httpd_socket:deliver(SocketType,Socket,Header) of
+ socket_closed ->
+ ?LOG("send -> socket closed while sending header",[]),
+ socket_close;
+ _ ->
+ send_body(SocketType,Socket,FileDescriptor)
+ end.
+
+send_body(SocketType,Socket,FileDescriptor) ->
+ case file:read(FileDescriptor,?FILE_CHUNK_SIZE) of
+ {ok,Binary} ->
+ ?DEBUG("send_body -> send another chunk: ~p",[size(Binary)]),
+ case httpd_socket:deliver(SocketType,Socket,Binary) of
+ socket_closed ->
+ ?LOG("send_body -> socket closed while sending",[]),
+ socket_close;
+ _ ->
+ send_body(SocketType,Socket,FileDescriptor)
+ end;
+ eof ->
+ ?DEBUG("send_body -> done with this file",[]),
+ eof
+ end.
+
+
+%% open_error - Handle file open failure
+%%
+open_error(eacces,Info,Path) ->
+ open_error(403,Info,Path,"");
+open_error(enoent,Info,Path) ->
+ open_error(404,Info,Path,"");
+open_error(enotdir,Info,Path) ->
+ open_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+open_error(emfile,_Info,Path) ->
+    open_error(500,none,Path,": Too many open files");
+open_error({enfile,_},_Info,Path) ->
+ open_error(500,none,Path,": File table overflow");
+open_error(_Reason,_Info,Path) ->
+ open_error(500,none,Path,"").
+
+open_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't open "++Path++Reason)};
+open_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,?NICE("Can't open "++Path++Reason)}.
+
+get_modification_date(Path)->
+ case file:read_file_info(Path) of
+ {ok, FileInfo0} ->
+ {FileInfo0, httpd_util:rfc1123_date(FileInfo0#file_info.mtime)};
+ _ ->
+ {#file_info{},""}
+ end.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_head.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_head.erl
new file mode 100644
index 0000000000..542604e092
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_head.erl
@@ -0,0 +1,89 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_head.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_head).
+-export([do/1]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "HEAD" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ _undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_head(Info);
+ %% A response has been sent! Nothing to do about it!
+ {already_sent,StatusCode,Size} ->
+ {proceed,Info#mod.data};
+ %% A response has been generated!
+ {StatusCode,Response} ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a HEAD method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_head(Info) ->
+ ?DEBUG("do_head -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ Suffix = httpd_util:suffix(Path),
+    %% Does the file exist?
+ case file:read_file_info(Path) of
+ {ok,FileInfo} ->
+ MimeType=httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Length=io_lib:write(FileInfo#file_info.size),
+ Head=[{content_type,MimeType},{content_length,Length},{code,200}],
+ {proceed,[{response,{response,Head,nobody}}|Info#mod.data]};
+ {error,Reason} ->
+ {proceed,
+ [{status,read_file_info_error(Reason,Info,Path)}|Info#mod.data]}
+ end.
+
+%% read_file_info_error - Handle file info read failure
+%%
+read_file_info_error(eacces,Info,Path) ->
+ read_file_info_error(403,Info,Path,"");
+read_file_info_error(enoent,Info,Path) ->
+ read_file_info_error(404,Info,Path,"");
+read_file_info_error(enotdir,Info,Path) ->
+ read_file_info_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+read_file_info_error(emfile,_Info,Path) ->
+    read_file_info_error(500,none,Path,": Too many open files");
+read_file_info_error({enfile,_},_Info,Path) ->
+ read_file_info_error(500,none,Path,": File table overflow");
+read_file_info_error(_Reason,_Info,Path) ->
+ read_file_info_error(500,none,Path,"").
+
+read_file_info_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't access "++Path++Reason)};
+read_file_info_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,
+ ?NICE("Can't access "++Path++Reason)}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_htaccess.erl
new file mode 100644
index 0000000000..069e4ad3a9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_htaccess.erl
@@ -0,0 +1,1150 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_htaccess.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+
+-module(mod_htaccess).
+
+-export([do/1, load/2]).
+-export([debug/0]).
+
+-include("httpd.hrl").
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Public methods that interface the eswapi %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Public method called by the webserver to insert the data about
+% names of access files
+%----------------------------------------------------------------------
+load([$A,$c,$c,$e,$s,$s,$F,$i,$l,$e,$N,$a,$m,$e|FileNames],Context)->
+ CleanFileNames=httpd_conf:clean(FileNames),
+ %%io:format("\n The filenames is:" ++ FileNames ++ "\n"),
+ {ok,[],{access_files,string:tokens(CleanFileNames," ")}}.
+
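For illustration, assuming httpd_conf:clean/1 merely strips surrounding whitespace, a directive line such as the one below (file names made up for this sketch) is parsed by load/2 above as:

    %% Illustration only:
    %%   load("AccessFileName .htaccess .acl", Context)
    %%       -> {ok, [], {access_files, [".htaccess", ".acl"]}}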
+
+%----------------------------------------------------------------------
+% Public method that the webserver calls to control access to the page
+%----------------------------------------------------------------------
+do(Info)->
+ case httpd_util:key1search(Info#mod.data,status) of
+ {Status_code,PhraseArgs,Reason}->
+ {proceed,Info#mod.data};
+ undefined ->
+ control_path(Info)
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions that check whether there is an access file and,     %%
+%% if so, whether access to the directory is allowed or not          %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+%Info = record mod as specified in httpd.hrl
+%returns either {proceed,Info#mod.data}
+%{proceed,[{status,403....}|Info#mod.data]}
+%{proceed,[{status,401....}|Info#mod.data]}
+%{proceed,[{status,500....}|Info#mod.data]}
+%----------------------------------------------------------------------
+control_path(Info) ->
+ Path = mod_alias:path(Info#mod.data,
+ Info#mod.config_db,
+ Info#mod.request_uri),
+ case isErlScriptOrNotAccessibleFile(Path,Info) of
+ true->
+ {proceed,Info#mod.data};
+ false->
+ case getHtAccessData(Path,Info)of
+ {ok,public}->
+		    %% There were no restrictions on the page, continue
+ {proceed,Info#mod.data};
+ {error,Reason} ->
+		    % Something went wrong; continue or quit?
+ {proceed,Info#mod.data};
+ {accessData,AccessData}->
+ controlAllowedMethod(Info,AccessData)
+ end
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% These methods check that the method the client used in the        %%
+%% request is one of the limited methods                              %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% Check whether the access method used is in the list of methods to challenge
+%
+%Info is the mod record as specified in httpd.hrl
+% AccessData is an ets table with the data from the .htaccess files
+%----------------------------------------------------------------------
+controlAllowedMethod(Info,AccessData)->
+ case allowedRequestMethod(Info,AccessData) of
+ allow->
+	    %% The request didn't use one of the limited methods
+ ets:delete(AccessData),
+ {proceed,Info#mod.data};
+ challenge->
+ authenticateUser(Info,AccessData)
+ end.
+
+%----------------------------------------------------------------------
+%Check the specified access method in the .htaccessfile
+%----------------------------------------------------------------------
+allowedRequestMethod(Info,AccessData)->
+ case ets:lookup(AccessData,limit) of
+ [{limit,all}]->
+ challenge;
+ [{limit,Methods}]->
+ isLimitedRequestMethod(Info,Methods)
+ end.
+
+
+%----------------------------------------------------------------------
+% Check the specified access methods in the .htaccess file against the
+% user's access method
+%
+%Info is the record from the do call
+%Methods is a list of the methods specified in the .htaccessfile
+%----------------------------------------------------------------------
+isLimitedRequestMethod(Info,Methods)->
+ case lists:member(Info#mod.method,Methods) of
+ true->
+ challenge;
+ false ->
+ allow
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% These methods check that the user comes from an allowed net       %%
+%% and, if so, whether it is a valid user or whether a challenge     %%
+%% shall be generated                                                 %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% The first thing to check is that the user is from a network
+% that has access to the page
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData)->
+ case controlNet(Info,AccessData) of
+ allow->
+	    % the network is ok, check that it is an allowed user
+ authenticateUser2(Info,AccessData);
+ deny->
+	    % The user isn't allowed to access the pages from that network
+ ets:delete(AccessData),
+ {proceed,[{status,{403,Info#mod.request_uri,
+ "Restricted area not allowed from your network"}}|Info#mod.data]}
+ end.
+
+
+%----------------------------------------------------------------------
+%The network the user comes from is allowed to view the resources
+% check whether the user needs to supply a password or not
+%----------------------------------------------------------------------
+authenticateUser2(Info,AccessData)->
+ case ets:lookup(AccessData,require) of
+ [{require,AllowedUsers}]->
+ case ets:lookup(AccessData,auth_name) of
+ [{auth_name,Realm}]->
+ authenticateUser2(Info,AccessData,Realm,AllowedUsers);
+ _NoAuthName->
+ ets:delete(AccessData),
+ {break,[{status,{500,none,
+ ?NICE("mod_htaccess:AuthName directive not specified")}}]}
+ end;
+ [] ->
+	    %% No special user is required and the network is ok, so let
+	    %% the user in
+ ets:delete(AccessData),
+ {proceed,Info#mod.data}
+ end.
+
+
+%----------------------------------------------------------------------
+% The user must send a user id and a password to get the resource.
+% Check whether it is already in the http request;
+% if the file with users is bad, send a 500 response
+%----------------------------------------------------------------------
+authenticateUser2(Info,AccessData,Realm,AllowedUsers)->
+ case authenticateUser(Info,AccessData,AllowedUsers) of
+ allow ->
+ ets:delete(AccessData),
+ {user,Name,Pwd}=getAuthenticatingDataFromHeader(Info),
+ {proceed, [{remote_user_name,Name}|Info#mod.data]};
+ challenge->
+ ets:delete(AccessData),
+ ReasonPhrase = httpd_util:reason_phrase(401),
+ Message = httpd_util:message(401,none,Info#mod.config_db),
+ {proceed,
+ [{response,
+ {401,
+ ["WWW-Authenticate: Basic realm=\"",Realm,
+ "\"\r\n\r\n","<HTML>\n<HEAD>\n<TITLE>",
+ ReasonPhrase,"</TITLE>\n",
+ "</HEAD>\n<BODY>\n<H1>",ReasonPhrase,
+ "</H1>\n",Message,"\n</BODY>\n</HTML>\n"]}}|
+ Info#mod.data]};
+ deny->
+ ets:delete(AccessData),
+ {break,[{status,{500,none,
+ ?NICE("mod_htaccess:Bad path to user or group file")}}]}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Methods that validate the network the user comes from             %%
+%% according to the allowed networks %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%---------------------------------------------------------------------
+%Check the user's network address against the specified networks to
+%allow or deny access
+%
+%returns either allow or deny
+%----------------------------------------------------------------------
+controlNet(Info,AccessData)->
+ UserNetwork=getUserNetworkAddress(Info),
+ case getAllowDenyOrder(AccessData) of
+ {_deny,[],_allow,[]}->
+ allow;
+ {deny,[],allow,AllowedNetworks}->
+ controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny);
+ {allow,AllowedNetworks,deny,[]}->
+ controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny);
+
+ {deny,DeniedNetworks,allow,[]}->
+ controlIfAllowed(DeniedNetworks,UserNetwork,allow,deny);
+ {allow,[],deny,DeniedNetworks}->
+ controlIfAllowed(DeniedNetworks,UserNetwork,allow,deny);
+
+ {deny,DeniedNetworks,allow,AllowedNetworks}->
+ controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork);
+ {allow,AllowedNetworks,deny,DeniedNetworks}->
+ controlAllowDeny(AllowedNetworks,DeniedNetworks,UserNetwork)
+ end.
+
+
+%----------------------------------------------------------------------
+%Returns the user's IP number
+%----------------------------------------------------------------------
+getUserNetworkAddress(Info)->
+ {_Socket,Address}=(Info#mod.init_data)#init_data.peername,
+ Address.
+
+
+%----------------------------------------------------------------------
+%Check the user's IP number against the IP numbers in the .htaccess file
+%----------------------------------------------------------------------
+controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)->
+ case AllowedNetworks of
+ [{allow,all}]->
+ IfAllowed;
+ [{deny,all}]->
+ IfDenied;
+ [{deny,Networks}]->
+ memberNetwork(Networks,UserNetwork,IfDenied,IfAllowed);
+ [{allow,Networks}]->
+ memberNetwork(Networks,UserNetwork,IfAllowed,IfDenied);
+ _Error->
+ IfDenied
+ end.
+
+
+%---------------------------------------------------------------------%
+%The deny control isn't necessary to perform since the allow control   %
+%overrides the deny control                                             %
+%---------------------------------------------------------------------%
+controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)->
+ case AllowedNetworks of
+ [{allow,all}]->
+ allow;
+ [{allow,Networks}]->
+ case memberNetwork(Networks,UserNetwork) of
+ true->
+ allow;
+ false->
+ deny
+ end
+ end.
+
+
+%----------------------------------------------------------------------%
+%Check that the user's network is in the allowed list; if so, check    %
+%whether it is also in the denied list                                  %
+%----------------------------------------------------------------------%
+controlAllowDeny(AllowedNetworks,DeniedNetworks,UserNetwork)->
+ case controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny) of
+ allow->
+ controlIfAllowed(DeniedNetworks,UserNetwork,deny,allow);
+ deny ->
+ deny
+ end.
+
+%----------------------------------------------------------------------
+%Checks whether the user's IP number is in the list of either denied or
+%allowed networks
+%----------------------------------------------------------------------
+memberNetwork(Networks,UserNetwork,IfTrue,IfFalse)->
+ case memberNetwork(Networks,UserNetwork) of
+ true->
+ IfTrue;
+ false->
+ IfFalse
+ end.
+
+
+%----------------------------------------------------------------------
+%Regexp match of the user's IP address against the networks in the list
+%of IP addresses or subnet addresses
+%----------------------------------------------------------------------
+memberNetwork(Networks,UserNetwork)->
+ case lists:filter(fun(Net)->
+ case regexp:match(UserNetwork,
+ formatRegexp(Net)) of
+ {match,1,_}->
+ true;
+ _NotSubNet ->
+ false
+ end
+ end,Networks) of
+ []->
+ false;
+ MemberNetWork ->
+ true
+ end.
+
+
+%----------------------------------------------------------------------
+%Creates a regexp from an IP number, e.g. "127.0.0" -> "^127[.]0[.]0.*"
+%and "127.0.0." -> "^127[.]0[.]0[.].*"
+%----------------------------------------------------------------------
+formatRegexp(Net)->
+ [SubNet1|SubNets]=string:tokens(Net,"."),
+ NetRegexp=lists:foldl(fun(SubNet,Newnet)->
+ Newnet ++ "[.]" ++SubNet
+ end,"^"++SubNet1,SubNets),
+ case string:len(Net)-string:rchr(Net,$.) of
+ 0->
+ NetRegexp++"[.].*";
+ _->
+ NetRegexp++".*"
+ end.
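+
+%----------------------------------------------------------------------
+%Illustrative sketch only (not used by the module): with the two helpers
+%above, a network prefix from the .htaccess file is turned into a regexp
+%and then matched against the peer address, roughly like
+%
+%  formatRegexp("127.0.0")  -> "^127[.]0[.]0.*"
+%  formatRegexp("127.0.0.") -> "^127[.]0[.]0[.].*"
+%  memberNetwork(["127.0.0"],"127.0.0.1") -> true
+%----------------------------------------------------------------------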
+
+
+%----------------------------------------------------------------------
+%If the user has specified whether the allow or the deny check shall be
+%performed first, get that order; if no order is specified, use
+%allow - deny since it is stricter than deny - allow
+%----------------------------------------------------------------------
+getAllowDenyOrder(AccessData)->
+ case ets:lookup(AccessData,order) of
+ [{order,{deny,allow}}]->
+ {deny,ets:lookup(AccessData,deny),
+ allow,ets:lookup(AccessData,allow)};
+ _DefaultOrder->
+ {allow,ets:lookup(AccessData,allow),
+ deny,ets:lookup(AccessData,deny)}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The methods that validate the user                                %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+%Check whether there is any authenticating data in the request header;
+%if so, check it against the users in the list AllowedUsers
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData,AllowedUsers)->
+ case getAuthenticatingDataFromHeader(Info) of
+ {user,User,PassWord}->
+ authenticateUser(Info,AccessData,AllowedUsers,
+ {user,User,PassWord});
+ {error,nouser}->
+ challenge;
+ {error,BadData}->
+ challenge
+ end.
+
+
+%----------------------------------------------------------------------
+%Returns the authenticating data in the HTTP request
+%----------------------------------------------------------------------
+getAuthenticatingDataFromHeader(Info)->
+    ParsedHeader=Info#mod.parsed_header,
+    case httpd_util:key1search(ParsedHeader,"authorization") of
+ undefined->
+ {error,nouser};
+ [$B,$a,$s,$i,$c,$\ |EncodedString]->
+ UnCodedString=httpd_util:decode_base64(EncodedString),
+ case httpd_util:split(UnCodedString,":",2) of
+ {ok,[User,PassWord]}->
+ {user,User,PassWord};
+ {error,Error}->
+ {error,Error}
+ end;
+ BadCredentials ->
+ {error,BadCredentials}
+ end.
+
+
+%----------------------------------------------------------------------
+%Returns a list of all members of the allowed groups
+%----------------------------------------------------------------------
+getGroupMembers(Groups,AllowedGroups)->
+ Allowed=lists:foldl(fun({group,Name,Members},AllowedMembers)->
+ case lists:member(Name,AllowedGroups) of
+ true->
+ AllowedMembers++Members;
+ false ->
+ AllowedMembers
+ end
+ end,[],Groups),
+ {ok,Allowed}.
+
+authenticateUser(Info,AccessData,{{users,[]},{groups,Groups}},User)->
+ authenticateUser(Info,AccessData,{groups,Groups},User);
+authenticateUser(Info,AccessData,{{users,Users},{groups,[]}},User)->
+ authenticateUser(Info,AccessData,{users,Users},User);
+
+authenticateUser(Info,AccessData,{{users,Users},{groups,Groups}},User)->
+ AllowUser=authenticateUser(Info,AccessData,{users,Users},User),
+ AllowGroup=authenticateUser(Info,AccessData,{groups,Groups},User),
+ case {AllowGroup,AllowUser} of
+ {_,allow}->
+ allow;
+ {allow,_}->
+ allow;
+ {challenge,_}->
+ challenge;
+ {_,challenge}->
+ challenge;
+ {_deny,_deny}->
+ deny
+ end;
+
+
+%----------------------------------------------------------------------
+%Checks that the user is a member of one of the allowed groups
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData,{groups,AllowedGroups},{user,User,PassWord})->
+ case getUsers(AccessData,group_file) of
+ {group_data,Groups}->
+ case getGroupMembers(Groups,AllowedGroups) of
+ {ok,Members}->
+ authenticateUser(Info,AccessData,{users,Members},
+ {user,User,PassWord});
+ {error,BadData}->
+ deny
+ end;
+ {error,BadData}->
+ deny
+ end;
+
+
+%----------------------------------------------------------------------
+%Control that the user is one of the allowed users and that the passwd is ok
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData,{users,AllowedUsers},{user,User,PassWord})->
+ case lists:member(User,AllowedUsers) of
+ true->
+ %Get the usernames and passwords from the file
+ case getUsers(AccessData,user_file) of
+ {error,BadData}->
+ deny;
+ {user_data,Users}->
+ %Users is a list of the users in
+ %the userfile [{user,User,Passwd}]
+ checkPassWord(Users,{user,User,PassWord})
+ end;
+ false ->
+ challenge
+ end.
+
+
+%----------------------------------------------------------------------
+%Check that the user User={user,"UserName","PassWd"} is a
+%member of the list of Users
+%----------------------------------------------------------------------
+checkPassWord(Users,User)->
+ case lists:member(User,Users) of
+ true->
+ allow;
+ false->
+ challenge
+ end.
+
+
+%----------------------------------------------------------------------
+%Get the users in the specified file.
+%UserOrGroup is an atom that specifies whether it is a group file or a
+%user file, i.e. group_file or user_file
+%----------------------------------------------------------------------
+getUsers({file,FileName},UserOrGroup)->
+ case file:open(FileName,[read]) of
+ {ok,AccessFileHandle} ->
+ getUsers({stream,AccessFileHandle},[],UserOrGroup);
+ {error,Reason} ->
+ {error,{Reason,FileName}}
+ end;
+
+
+%----------------------------------------------------------------------
+%The clause that starts the lookup of the user or group file
+%----------------------------------------------------------------------
+
+getUsers(AccessData,UserOrGroup)->
+ case ets:lookup(AccessData,UserOrGroup) of
+ [{UserOrGroup,File}]->
+ getUsers({file,File},UserOrGroup);
+ _ ->
+ {error,noUsers}
+ end.
+
+
+%----------------------------------------------------------------------
+%Reads data from the file handle File into the list FileData and when it
+%reaches the end it returns the list in a tuple {user_data|group_data,FileData}
+%----------------------------------------------------------------------
+getUsers({stream,File},FileData,UserOrGroup)->
+ case io:get_line(File,[]) of
+ eof when UserOrGroup==user_file->
+ {user_data,FileData};
+ eof when UserOrGroup ==group_file->
+ {group_data,FileData};
+ Line ->
+ getUsers({stream,File},
+ formatUser(Line,FileData,UserOrGroup),UserOrGroup)
+ end.
+
+
+%----------------------------------------------------------------------
+%If the line is a comment remove it
+%----------------------------------------------------------------------
+formatUser([$#|UserDataComment],FileData,_UserOrgroup)->
+ FileData;
+
+
+%----------------------------------------------------------------------
+%A user line in the file is Username:Passwd\n
+%Remove the newline and split the line into
+%UserName and Password
+%----------------------------------------------------------------------
+formatUser(UserData,FileData,UserOrGroup)->
+ case string:tokens(UserData," \r\n")of
+ [User|Whitespace] when UserOrGroup==user_file->
+ case string:tokens(User,":") of
+ [Name,PassWord]->
+ [{user,Name,PassWord}|FileData];
+ _Error->
+ FileData
+ end;
+ GroupData when UserOrGroup==group_file ->
+ parseGroupData(GroupData,FileData);
+ _Error ->
+ FileData
+ end.
+
+
+%----------------------------------------------------------------------
+%If everything is right GroupData has the form
+% ["GroupName:", "Member1", "Member2", ..., "MemberN"]
+%----------------------------------------------------------------------
+parseGroupData([GroupName|GroupData],FileData)->
+ [{group,formatGroupName(GroupName),GroupData}|FileData].
+
+
+%----------------------------------------------------------------------
+%A line in the group file is GroupName: Member1 Member2 ... MemberN
+%Remove the : from the group name
+%----------------------------------------------------------------------
+formatGroupName(GroupName)->
+ string:strip(GroupName,right,$:).
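+
+%----------------------------------------------------------------------
+%Illustrative sketch of the expected file contents (the names and
+%passwords are invented for the example). A user file line and a group
+%file line such as
+%
+%  lotta:potta
+%  web: lotta kalle
+%
+%are turned by formatUser/3 and parseGroupData/2 above into
+%
+%  {user,"lotta","potta"}
+%  {group,"web",["lotta","kalle"]}
+%----------------------------------------------------------------------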
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Functions that parse the access files                             %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+%Check that the asset is a real file and not a request for a virtual
+%asset
+%----------------------------------------------------------------------
+isErlScriptOrNotAccessibleFile(Path,Info)->
+ case file:read_file_info(Path) of
+ {ok,_fileInfo}->
+ false;
+ {error,_Reason} ->
+ true
+ end.
+
+
+%----------------------------------------------------------------------
+%Path is the path to the requested file (a string)
+%Info is the #mod record
+%----------------------------------------------------------------------
+getHtAccessData(Path,Info)->
+ HtAccessFileNames=getHtAccessFileNames(Info),
+ case getData(Path,Info,HtAccessFileNames) of
+ {ok,public}->
+ {ok,public};
+ {accessData,AccessData}->
+ {accessData,AccessData};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+
+%----------------------------------------------------------------------
+%Returns the names of the access files
+%----------------------------------------------------------------------
+getHtAccessFileNames(Info)->
+ case httpd_util:lookup(Info#mod.config_db,access_files) of
+ undefined->
+ [".htaccess"];
+ Files->
+ Files
+ end.
+%----------------------------------------------------------------------
+%HtAccessFileNames=["AccessFileName1",...,"AccessFileNameN"]
+%----------------------------------------------------------------------
+getData(Path,Info,HtAccessFileNames)->
+ case regexp:split(Path,"/") of
+ {error,Error}->
+ {error,Error};
+ {ok,SplittedPath}->
+ getData2(HtAccessFileNames,SplittedPath,Info)
+ end.
+
+
+%----------------------------------------------------------------------
+%Join together the data in the SplittedPath up to the path
+%that is the alias or the document root,
+%since we do not need to look for any access files before that point
+%----------------------------------------------------------------------
+getData2(HtAccessFileNames,SplittedPath,Info)->
+ case getRootPath(SplittedPath,Info) of
+ {error,Path}->
+ {error,Path};
+ {ok,StartPath,RestOfSplittedPath} ->
+ getData2(HtAccessFileNames,StartPath,RestOfSplittedPath,Info)
+ end.
+
+
+%----------------------------------------------------------------------
+%HtAccessFileNames is a list of the names the access files can have
+%Path is the shortest match against all aliases and the document root
+%RestOfSplittedPath is a list of the parts of the path
+%Info is the mod record from the server
+%----------------------------------------------------------------------
+getData2(HtAccessFileNames,StartPath,RestOfSplittedPath,Info)->
+ case getHtAccessFiles(HtAccessFileNames,StartPath,RestOfSplittedPath) of
+ []->
+            %No access file; quit, it's a public directory
+ {ok,public};
+ Files ->
+ loadAccessFilesData(Files)
+ end.
+
+
+%----------------------------------------------------------------------
+%Loads the data in the access files specified by
+% AccessFiles=["/home/public/html/accessfile",
+%              "/home/public/html/priv/accessfile"]
+%----------------------------------------------------------------------
+loadAccessFilesData(AccessFiles)->
+ loadAccessFilesData(AccessFiles,ets:new(accessData,[])).
+
+
+%----------------------------------------------------------------------
+%Expands the stored context directives into top-level table entries
+%----------------------------------------------------------------------
+contextToValues(AccessData)->
+ case ets:lookup(AccessData,context) of
+ [{context,Values}]->
+ ets:delete(AccessData,context),
+ insertContext(AccessData,Values),
+ {accessData,AccessData};
+ _Error->
+ {error,errorInAccessFile}
+ end.
+
+
+insertContext(AccessData,[])->
+ ok;
+
+insertContext(AccessData,[{allow,From}|Values])->
+ insertDenyAllowContext(AccessData,{allow,From}),
+ insertContext(AccessData,Values);
+
+insertContext(AccessData,[{deny,From}|Values])->
+ insertDenyAllowContext(AccessData,{deny,From}),
+ insertContext(AccessData,Values);
+
+insertContext(AccessData,[{require,{GrpOrUsr,Members}}|Values])->
+ case ets:lookup(AccessData,require) of
+ []when GrpOrUsr==users->
+ ets:insert(AccessData,{require,{{users,Members},{groups,[]}}});
+
+ [{require,{{users,Users},{groups,Groups}}}]when GrpOrUsr==users ->
+ ets:insert(AccessData,{require,{{users,Users++Members},
+ {groups,Groups}}});
+ []when GrpOrUsr==groups->
+ ets:insert(AccessData,{require,{{users,[]},{groups,Members}}});
+
+ [{require,{{users,Users},{groups,Groups}}}]when GrpOrUsr==groups ->
+ ets:insert(AccessData,{require,{{users,Users},
+ {groups,Groups++Members}}})
+ end,
+ insertContext(AccessData,Values);
+
+
+
+%%The limit and order directives need no transformation, they are just inserted
+insertContext(AccessData,[Elem|Values])->
+ ets:insert(AccessData,Elem),
+ insertContext(AccessData,Values).
+
+
+insertDenyAllowContext(AccessData,{AllowDeny,From})->
+ case From of
+ all->
+ ets:insert(AccessData,{AllowDeny,all});
+ AllowedSubnets->
+ case ets:lookup(AccessData,AllowDeny) of
+ []->
+ ets:insert(AccessData,{AllowDeny,From});
+ [{AllowDeny,all}]->
+ ok;
+ [{AllowDeny,Networks}]->
+                    ets:insert(AccessData,{AllowDeny,Networks++From})
+ end
+ end.
+
+loadAccessFilesData([],AccessData)->
+    %Transform the stored context directives into values
+ contextToValues(AccessData),
+ {accessData,AccessData};
+
+%----------------------------------------------------------------------
+%Takes each file in the list and load the data to the ets table
+%AccessData
+%----------------------------------------------------------------------
+loadAccessFilesData([FileName|FileNames],AccessData)->
+ case loadAccessFileData({file,FileName},AccessData) of
+ overRide->
+ loadAccessFilesData(FileNames,AccessData);
+ noOverRide ->
+ {accessData,AccessData};
+ error->
+ ets:delete(AccessData),
+ {error,errorInAccessFile}
+ end.
+
+%----------------------------------------------------------------------
+%opens the filehandle to the specified file
+%----------------------------------------------------------------------
+loadAccessFileData({file,FileName},AccessData)->
+ case file:open(FileName,[read]) of
+ {ok,AccessFileHandle}->
+ loadAccessFileData({stream,AccessFileHandle},AccessData,[]);
+ {error,Reason} ->
+ overRide
+ end.
+
+%----------------------------------------------------------------------
+%%Look at each line in the file and add it to the database.
+%%When end of file is reached, check whether override is allowed
+%%and return overRide or noOverRide accordingly
+%----------------------------------------------------------------------
+loadAccessFileData({stream,File},AccessData,FileData)->
+ case io:get_line(File,[]) of
+ eof->
+ insertData(AccessData,FileData),
+ case ets:match_object(AccessData,{'_',error}) of
+ []->
+                    %In case we got no errors, check that we may override
+                    %at least some of the values
+ case ets:match_object(AccessData,
+ {allow_over_ride,none}) of
+ []->
+ overRide;
+ _NoOverride->
+ noOverRide
+ end;
+ Errors->
+ error
+ end;
+ Line ->
+ loadAccessFileData({stream,File},AccessData,
+ insertLine(string:strip(Line,left),FileData))
+ end.
+
+%----------------------------------------------------------------------
+%AccessData is an ets table where the previously found data is inserted
+%FileData is a list of the directives in the last parsed file
+%Before insertion a check is made that the directive is allowed to
+%override
+%----------------------------------------------------------------------
+insertData(AccessData,{{context,Values},FileData})->
+ insertData(AccessData,[{context,Values}|FileData]);
+
+insertData(AccessData,FileData)->
+ case ets:lookup(AccessData,allow_over_ride) of
+ [{allow_over_ride,all}]->
+ lists:foreach(fun(Elem)->
+ ets:insert(AccessData,Elem)
+ end,FileData);
+ []->
+ lists:foreach(fun(Elem)->
+ ets:insert(AccessData,Elem)
+ end,FileData);
+ [{allow_over_ride,Directives}]when list(Directives)->
+ lists:foreach(fun({Key,Value})->
+ case lists:member(Key,Directives) of
+ true->
+ ok;
+ false ->
+ ets:insert(AccessData,{Key,Value})
+ end
+ end,FileData);
+ [{allow_over_ride,_}]->
+            %Will never appear unless the user
+            %is writing very strange config files
+ ok
+ end.
+%----------------------------------------------------------------------
+%Take a line in the access file and transform it into a tuple that
+%can later be inserted into the ets table
+%----------------------------------------------------------------------
+%%%Here are the alternatives that reside inside the limit context
+
+insertLine([$o,$r,$d,$e,$r|Order],{{context,Values},FileData})->
+ {{context,[{order,getOrder(Order)}|Values]},FileData};
+%%Let the user place a tab in the beginning
+insertLine([$\t,$o,$r,$d,$e,$r|Order],{{context,Values},FileData})->
+ {{context,[{order,getOrder(Order)}|Values]},FileData};
+
+insertLine([$a,$l,$l,$o,$w|Allow],{{context,Values},FileData})->
+ {{context,[{allow,getAllowDenyData(Allow)}|Values]},FileData};
+insertLine([$\t,$a,$l,$l,$o,$w|Allow],{{context,Values},FileData})->
+ {{context,[{allow,getAllowDenyData(Allow)}|Values]},FileData};
+
+insertLine([$d,$e,$n,$y|Deny],{{context,Values},FileData})->
+ {{context,[{deny,getAllowDenyData(Deny)}|Values]},FileData};
+insertLine([$\t,$d,$e,$n,$y|Deny],{{context,Values},FileData})->
+ {{context,[{deny,getAllowDenyData(Deny)}|Values]},FileData};
+
+
+insertLine([$r,$e,$q,$u,$i,$r,$e|Require],{{context,Values},FileData})->
+ {{context,[{require,getRequireData(Require)}|Values]},FileData};
+insertLine([$\t,$r,$e,$q,$u,$i,$r,$e|Require],{{context,Values},FileData})->
+ {{context,[{require,getRequireData(Require)}|Values]},FileData};
+
+
+insertLine([$<,$/,$L,$i,$m,$i,$t|EndLimit],{Context,FileData})->
+ [Context|FileData];
+
+insertLine([$<,$L,$i,$m,$i,$t|Limit],FileData)->
+ {{context,[{limit,getLimits(Limit)}]}, FileData};
+
+
+
+insertLine([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e,$\ |AuthUserFile],FileData)->
+ [{user_file,string:strip(AuthUserFile,right,$\n)}|FileData];
+
+insertLine([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e,$\ |AuthGroupFile],
+ FileData)->
+ [{group_file,string:strip(AuthGroupFile,right,$\n)}|FileData];
+
+insertLine([$A,$l,$l,$o,$w,$O,$v,$e,$r,$R,$i,$d,$e|AllowOverRide],FileData)->
+ [{allow_over_ride,getAllowOverRideData(AllowOverRide)}
+ |FileData];
+
+insertLine([$A,$u,$t,$h,$N,$a,$m,$e,$\ |AuthName],FileData)->
+ [{auth_name,string:strip(AuthName,right,$\n)}|FileData];
+
+insertLine([$A,$u,$t,$h,$T,$y,$p,$e|AuthType],FileData)->
+ [{auth_type,getAuthorizationType(AuthType)}|FileData];
+
+insertLine(_BadDirectiveOrComment,FileData)->
+ FileData.
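+
+%----------------------------------------------------------------------
+%Illustrative sketch of an access file that the clauses above can parse
+%(all paths and names are invented for the example):
+%
+%  AuthUserFile /var/tmp/server_root/auth/users
+%  AuthGroupFile /var/tmp/server_root/auth/groups
+%  AuthName Restricted area
+%  AuthType Basic
+%  AllowOverRide all
+%  <Limit GET POST>
+%  order allow deny
+%  allow from 127.0.0
+%  require user lotta
+%  require group web
+%  </Limit>
+%
+%Note that allow, deny, order and require are only recognised inside a
+%<Limit> ... </Limit> block, and that order appears to be tokenised on
+%whitespace here (e.g. "order allow deny").
+%----------------------------------------------------------------------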
+
+%----------------------------------------------------------------------
+%Transform the data specified for override to a form that is easier
+%to handle later
+%Override data="all"|"none"|"Directive1 ... DirectiveN"
+%----------------------------------------------------------------------
+
+getAllowOverRideData(OverRideData)->
+ case string:tokens(OverRideData," \r\n") of
+ [[$a,$l,$l]|_]->
+ all;
+ [[$n,$o,$n,$e]|_]->
+ none;
+ Directives ->
+ getOverRideDirectives(Directives)
+ end.
+
+getOverRideDirectives(Directives)->
+ lists:map(fun(Directive)->
+ transformDirective(Directive)
+ end,Directives).
+transformDirective([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e|_])->
+ user_file;
+transformDirective([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e|_]) ->
+ group_file;
+transformDirective([$A,$u,$t,$h,$N,$a,$m,$e|_])->
+ auth_name;
+transformDirective([$A,$u,$t,$h,$T,$y,$p,$e|_])->
+ auth_type;
+transformDirective(_UnAllowedOverRideDirective) ->
+ unallowed.
+%----------------------------------------------------------------------
+%Take the string that specifies which method to use for authentication
+%and replace it with an atom for easier matching
+%----------------------------------------------------------------------
+getAuthorizationType(AuthType)->
+ [Arg|Crap]=string:tokens(AuthType,"\n\r\ "),
+ case Arg of
+ [$B,$a,$s,$i,$c]->
+ basic;
+ [$M,$D,$5] ->
+ md5;
+ _What ->
+ error
+ end.
+%----------------------------------------------------------------------
+%Returns a list of the specified methods to limit or the atom all
+%----------------------------------------------------------------------
+getLimits(Limits)->
+ case regexp:split(Limits,">")of
+ {ok,[_NoEndOnLimit]}->
+ error;
+ {ok,[Methods|Crap]}->
+ case regexp:split(Methods," ")of
+ {ok,[]}->
+ all;
+ {ok,SplittedMethods}->
+ SplittedMethods;
+ {error,Error}->
+ error
+ end;
+ {error,_Error}->
+ error
+ end.
+
+
+%----------------------------------------------------------------------
+% Transform the order in which to perform the deny/allow control into a tuple of atoms
+%----------------------------------------------------------------------
+getOrder(Order)->
+ [First|Rest]=lists:map(fun(Part)->
+ list_to_atom(Part)
+ end,string:tokens(Order," \n\r")),
+ case First of
+ deny->
+ {deny,allow};
+ allow->
+ {allow,deny};
+ _Error->
+ error
+ end.
+
+%----------------------------------------------------------------------
+% The string AllowDeny is "from all" or "from Subnet1 Subnet2...SubnetN"
+%----------------------------------------------------------------------
+getAllowDenyData(AllowDeny)->
+ case string:tokens(AllowDeny," \n\r") of
+ [_From|AllowDenyData] when length(AllowDenyData)>=1->
+ case lists:nth(1,AllowDenyData) of
+ [$a,$l,$l]->
+ all;
+ Hosts->
+ AllowDenyData
+ end;
+        _Error->
+            error
+ end.
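+
+%Illustrative sketch only (addresses invented for the example):
+%  getAllowDenyData(" from all\n")            -> all
+%  getAllowDenyData(" from 127.0.0 10.0.0\n") -> ["127.0.0","10.0.0"]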
+%----------------------------------------------------------------------
+% Parse the string that describes who is allowed to see the page
+%----------------------------------------------------------------------
+getRequireData(Require)->
+ [UserOrGroup|UserData]=string:tokens(Require," \n\r"),
+ case UserOrGroup of
+ [$u,$s,$e,$r]->
+ {users,UserData};
+ [$g,$r,$o,$u,$p] ->
+ {groups,UserData};
+ _Whatever ->
+ error
+ end.
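+
+%Illustrative sketch only (user and group names invented):
+%  getRequireData(" user lotta kalle\n") -> {users,["lotta","kalle"]}
+%  getRequireData(" group web\n")        -> {groups,["web"]}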
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Methods that collect the search paths to the access files         %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Get the full paths to the different access files
+%----------------------------------------------------------------------
+getHtAccessFiles(HtAccessFileNames,Path,RestOfSplittedPath)->
+ getHtAccessFiles(HtAccessFileNames,Path,RestOfSplittedPath,[]).
+
+getHtAccessFiles(HtAccessFileNames,Path,[[]],HtAccessFiles)->
+ HtAccessFiles ++ accessFilesOfPath(HtAccessFileNames,Path++"/");
+
+getHtAccessFiles(HtAccessFileNames,Path,[],HtAccessFiles)->
+ HtAccessFiles;
+getHtAccessFiles(HtAccessFileNames,Path,[NextDir|RestOfSplittedPath],
+ AccessFiles)->
+ getHtAccessFiles(HtAccessFileNames,Path++"/"++NextDir,RestOfSplittedPath,
+ AccessFiles ++
+ accessFilesOfPath(HtAccessFileNames,Path++"/")).
+
+
+%----------------------------------------------------------------------
+%Check whether there are any access files in the path
+%----------------------------------------------------------------------
+accessFilesOfPath(HtAccessFileNames,Path)->
+ lists:foldl(fun(HtAccessFileName,Files)->
+ case file:read_file_info(Path++HtAccessFileName) of
+ {ok,FileInfo}->
+ [Path++HtAccessFileName|Files];
+ {error,_Error} ->
+ Files
+ end
+ end,[],HtAccessFileNames).
+
+
+%----------------------------------------------------------------------
+%Take the splitted path and join it up to the document root or the alias
+%that matches first
+%----------------------------------------------------------------------
+
+getRootPath(SplittedPath,Info)->
+ DocRoot=httpd_util:lookup(Info#mod.config_db,document_root,"/"),
+ PresumtiveRootPath=
+ [DocRoot|lists:map(fun({Alias,RealPath})->
+ RealPath
+ end,
+ httpd_util:multi_lookup(Info#mod.config_db,alias))],
+ getRootPath(PresumtiveRootPath,SplittedPath,Info).
+
+
+getRootPath(PresumtiveRootPath,[[],Splittedpath],Info)->
+ getRootPath(PresumtiveRootPath,["/",Splittedpath],Info);
+
+
+getRootPath(PresumtiveRootPath,[Part,NextPart|SplittedPath],Info)->
+ case lists:member(Part,PresumtiveRootPath)of
+ true->
+ {ok,Part,[NextPart|SplittedPath]};
+ false ->
+ getRootPath(PresumtiveRootPath,
+ [Part++"/"++NextPart|SplittedPath],Info)
+ end;
+
+getRootPath(PresumtiveRootPath,[Part],Info)->
+ case lists:member(Part,PresumtiveRootPath)of
+ true->
+ {ok,Part,[]};
+ false ->
+ {error,Part}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%Debug methods %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% Simulate the web server by calling do/1 with appropriate parameters
+%----------------------------------------------------------------------
+debug()->
+ Conf=getConfigData(),
+ Uri=getUri(),
+ {_Proceed,Data}=getDataFromAlias(Conf,Uri),
+ Init_data=#init_data{peername={socket,"127.0.0.1"}},
+ ParsedHeader=headerparts(),
+ do(#mod{init_data=Init_data,
+ data=Data,
+ config_db=Conf,
+ request_uri=Uri,
+ parsed_header=ParsedHeader,
+ method="GET"}).
+
+%----------------------------------------------------------------------
+%Add authentication data to the fake HTTP request header
+%----------------------------------------------------------------------
+headerparts()->
+ [{"authorization","Basic " ++ httpd_util:encode_base64("lotta:potta")}].
+
+getDataFromAlias(Conf,Uri)->
+ mod_alias:do(#mod{config_db=Conf,request_uri=Uri}).
+
+getUri()->
+ "/appmon/test/test.html".
+
+getConfigData()->
+ Tab=ets:new(test_inets,[bag,public]),
+ ets:insert(Tab,{server_name,"localhost"}),
+    ets:insert(Tab,{bind_address,{127,0,0,1}}),
+ ets:insert(Tab,{erl_script_alias,{"/webcover/erl",["webcover"]}}),
+ ets:insert(Tab,{erl_script_alias,{"/erl",["webappmon"]}}),
+ ets:insert(Tab,{com_type,ip_comm}),
+ ets:insert(Tab,{modules,[mod_alias,mod_auth,mod_header]}),
+ ets:insert(Tab,{default_type,"text/plain"}),
+ ets:insert(Tab,{server_root,
+ "/home/gandalf/marting/exjobb/webtool-1.0/priv/root"}),
+ ets:insert(Tab,{port,8888}),
+ ets:insert(Tab,{document_root,
+ "/home/gandalf/marting/exjobb/webtool-1.0/priv/root"}),
+ ets:insert(Tab,
+ {alias,
+ {"/appmon"
+ ,"/home/gandalf/marting/exjobb/webappmon-1.0/priv"}}),
+ ets:insert(Tab,{alias,
+ {"/webcover"
+ ,"/home/gandalf/marting/exjobb/webcover-1.0/priv"}}),
+    ets:insert(Tab,{access_files,[".htaccess","kalle","pelle"]}),
+ Tab.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_include.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_include.erl
new file mode 100644
index 0000000000..c93e0a4f59
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_include.erl
@@ -0,0 +1,726 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_include.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_include).
+-export([do/1,parse/2,config/6,include/6,echo/6,fsize/6,flastmod/6,exec/6]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"INCLUDE").
+-include("httpd_verbosity.hrl").
+
+%% do
+
+do(Info) ->
+ ?vtrace("do",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data, response) of
+ %% No response has been generated!
+ undefined ->
+ do_include(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_include(Info) ->
+ ?vtrace("do_include -> entry with"
+ "~n URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ Suffix = httpd_util:suffix(Path),
+ case httpd_util:lookup_mime_default(Info#mod.config_db,Suffix) of
+ "text/x-server-parsed-html" ->
+ HeaderStart =
+ httpd_util:header(200, "text/html", Info#mod.connection),
+ ?vtrace("do_include -> send ~p", [Path]),
+ case send_in(Info,Path,HeaderStart,file:read_file_info(Path)) of
+ {ok, ErrorLog, Size} ->
+ ?vtrace("do_include -> sent ~w bytes", [Size]),
+ {proceed,[{response,{already_sent,200,Size}},
+ {mime_type,"text/html"}|
+ lists:append(ErrorLog,Info#mod.data)]};
+ {error, Reason} ->
+ ?vlog("send in failed:"
+ "~n Reason: ~p"
+ "~n Path: ~p"
+ "~n Info: ~p",
+ [Reason,Info,Path]),
+ {proceed,
+ [{status,send_error(Reason,Info,Path)}|Info#mod.data]}
+ end;
+ _ -> %% Unknown mime type, ignore
+ {proceed,Info#mod.data}
+ end.
+
+
+%%
+%% config directive
+%%
+
+config(Info, Context, ErrorLog, TagList, ValueList, R) ->
+ case verify_tags("config",[errmsg,timefmt,sizefmt],
+ TagList,ValueList) of
+ ok ->
+ {ok,update_context(TagList,ValueList,Context),ErrorLog,"",R};
+ {error,Reason} ->
+ {ok,Context,[{internal_info,Reason}|ErrorLog],
+ httpd_util:key1search(Context,errmsg,""),R}
+ end.
+
+update_context([],[],Context) ->
+ Context;
+update_context([Tag|R1],[Value|R2],Context) ->
+ update_context(R1,R2,[{Tag,Value}|Context]).
+
+verify_tags(Command,ValidTags,TagList,ValueList) when length(TagList)==length(ValueList) ->
+ verify_tags(Command,ValidTags,TagList);
+verify_tags(Command,ValidTags,TagList,ValueList) ->
+ {error,?NICE(Command++" directive has spurious tags")}.
+
+verify_tags(Command, ValidTags, []) ->
+ ok;
+verify_tags(Command, ValidTags, [Tag|Rest]) ->
+ case lists:member(Tag, ValidTags) of
+ true ->
+ verify_tags(Command, ValidTags, Rest);
+ false ->
+ {error,?NICE(Command++" directive has a spurious tag ("++
+ atom_to_list(Tag)++")")}
+ end.
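+
+%% Illustrative sketch only: a server-parsed document can set up the
+%% context with a directive such as
+%%
+%%   <!--#config sizefmt="abbrev" errmsg="[error]" -->
+%%
+%% which produces no output but adds {sizefmt,"abbrev"} and
+%% {errmsg,"[error]"} to the context used by later directives.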
+
+%%
+%% include directive
+%%
+
+include(Info,Context,ErrorLog,[virtual],[VirtualPath],R) ->
+ Aliases = httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_, Path, _AfterPath} =
+ mod_alias:real_name(Info#mod.config_db, VirtualPath, Aliases),
+ include(Info,Context,ErrorLog,R,Path);
+include(Info, Context, ErrorLog, [file], [FileName], R) ->
+ Path = file(Info#mod.config_db, Info#mod.request_uri, FileName),
+ include(Info, Context, ErrorLog, R, Path);
+include(Info, Context, ErrorLog, TagList, ValueList, R) ->
+ {ok, Context,
+ [{internal_info,?NICE("include directive has a spurious tag")}|
+ ErrorLog], httpd_util:key1search(Context, errmsg, ""), R}.
+
+include(Info, Context, ErrorLog, R, Path) ->
+ ?DEBUG("include -> read file: ~p",[Path]),
+ case file:read_file(Path) of
+ {ok, Body} ->
+ ?DEBUG("include -> size(Body): ~p",[size(Body)]),
+ {ok, NewContext, NewErrorLog, Result} =
+ parse(Info, binary_to_list(Body), Context, ErrorLog, []),
+ {ok, Context, NewErrorLog, Result, R};
+ {error, Reason} ->
+ {ok, Context,
+ [{internal_info, ?NICE("Can't open "++Path)}|ErrorLog],
+ httpd_util:key1search(Context, errmsg, ""), R}
+ end.
+
+file(ConfigDB, RequestURI, FileName) ->
+ Aliases = httpd_util:multi_lookup(ConfigDB, alias),
+ {_, Path, _AfterPath}
+ = mod_alias:real_name(ConfigDB, RequestURI, Aliases),
+ Pwd = filename:dirname(Path),
+ filename:join(Pwd, FileName).
+
+%%
+%% echo directive
+%%
+
+echo(Info,Context,ErrorLog,[var],["DOCUMENT_NAME"],R) ->
+ {ok,Context,ErrorLog,document_name(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,[var],["DOCUMENT_URI"],R) ->
+ {ok,Context,ErrorLog,document_uri(Info#mod.config_db,
+ Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,[var],["QUERY_STRING_UNESCAPED"],R) ->
+ {ok,Context,ErrorLog,query_string_unescaped(Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,[var],["DATE_LOCAL"],R) ->
+ {ok,Context,ErrorLog,date_local(),R};
+echo(Info,Context,ErrorLog,[var],["DATE_GMT"],R) ->
+ {ok,Context,ErrorLog,date_gmt(),R};
+echo(Info,Context,ErrorLog,[var],["LAST_MODIFIED"],R) ->
+ {ok,Context,ErrorLog,last_modified(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ {ok,Context,
+ [{internal_info,?NICE("echo directive has a spurious tag")}|
+ ErrorLog],"(none)",R}.
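+
+%% Illustrative sketch only: in a "text/x-server-parsed-html" document,
+%%
+%%   <!--#echo var="DATE_GMT" -->
+%%
+%% is replaced by the output of date_gmt/0 below, while an unknown
+%% variable falls through to the last clause above and is replaced by
+%% "(none)" plus an internal_info entry in the error log.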
+
+document_name(Data,ConfigDB,RequestURI) ->
+ Path = mod_alias:path(Data,ConfigDB,RequestURI),
+ case regexp:match(Path,"[^/]*\$") of
+ {match,Start,Length} ->
+ string:substr(Path,Start,Length);
+ nomatch ->
+ "(none)"
+ end.
+
+document_uri(ConfigDB, RequestURI) ->
+ Aliases = httpd_util:multi_lookup(ConfigDB, alias),
+ {Path, AfterPath} =
+ case mod_alias:real_name(ConfigDB, RequestURI, Aliases) of
+ {_, Name, {[], []}} ->
+ {Name, ""};
+ {_, Name, {PathInfo, []}} ->
+ {Name, "/"++PathInfo};
+ {_, Name, {PathInfo, QueryString}} ->
+ {Name, "/"++PathInfo++"?"++QueryString};
+ {_, Name, _} ->
+ {Name, ""};
+ Gurka ->
+ io:format("Gurka: ~p~n", [Gurka])
+ end,
+ VirtualPath = string:substr(RequestURI, 1,
+ length(RequestURI)-length(AfterPath)),
+ {match, Start, Length} = regexp:match(Path,"[^/]*\$"),
+ FileName = string:substr(Path,Start,Length),
+ case regexp:match(VirtualPath, FileName++"\$") of
+ {match, _, _} ->
+ httpd_util:decode_hex(VirtualPath)++AfterPath;
+ nomatch ->
+ string:strip(httpd_util:decode_hex(VirtualPath),right,$/)++
+ "/"++FileName++AfterPath
+ end.
+
+query_string_unescaped(RequestURI) ->
+ case regexp:match(RequestURI,"[\?].*\$") of
+ {match,Start,Length} ->
+ %% Escape all shell-special variables with \
+ escape(string:substr(RequestURI,Start+1,Length-1));
+ nomatch ->
+ "(none)"
+ end.
+
+escape([]) -> [];
+escape([$;|R]) -> [$\\,$;|escape(R)];
+escape([$&|R]) -> [$\\,$&|escape(R)];
+escape([$(|R]) -> [$\\,$(|escape(R)];
+escape([$)|R]) -> [$\\,$)|escape(R)];
+escape([$||R]) -> [$\\,$||escape(R)];
+escape([$^|R]) -> [$\\,$^|escape(R)];
+escape([$<|R]) -> [$\\,$<|escape(R)];
+escape([$>|R]) -> [$\\,$>|escape(R)];
+escape([$\n|R]) -> [$\\,$\n|escape(R)];
+escape([$ |R]) -> [$\\,$ |escape(R)];
+escape([$\t|R]) -> [$\\,$\t|escape(R)];
+escape([C|R]) -> [C|escape(R)].
+
+date_local() ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=calendar:local_time(),
+ %% Time format hard-wired to: "%a %b %e %T %Y" according to strftime(3)
+ io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w ~w",
+ [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
+ httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
+
+date_gmt() ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=calendar:universal_time(),
+ %% Time format hard-wired to: "%a %b %e %T %Z %Y" according to strftime(3)
+ io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w GMT ~w",
+ [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
+ httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
+
+last_modified(Data,ConfigDB,RequestURI) ->
+ {ok,FileInfo}=file:read_file_info(mod_alias:path(Data,ConfigDB,RequestURI)),
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w ~w",
+ [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
+ httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
+
+%%
+%% fsize directive
+%%
+
+fsize(Info,Context,ErrorLog,[virtual],[VirtualPath],R) ->
+ Aliases=httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_,Path,AfterPath}=
+ mod_alias:real_name(Info#mod.config_db,VirtualPath,Aliases),
+ fsize(Info, Context, ErrorLog, R, Path);
+fsize(Info,Context,ErrorLog,[file],[FileName],R) ->
+ Path=file(Info#mod.config_db,Info#mod.request_uri,FileName),
+ fsize(Info,Context,ErrorLog,R,Path);
+fsize(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ {ok,Context,[{internal_info,?NICE("fsize directive has a spurious tag")}|
+ ErrorLog],httpd_util:key1search(Context,errmsg,""),R}.
+
+fsize(Info,Context,ErrorLog,R,Path) ->
+ case file:read_file_info(Path) of
+ {ok,FileInfo} ->
+ case httpd_util:key1search(Context,sizefmt) of
+ "bytes" ->
+ {ok,Context,ErrorLog,
+ integer_to_list(FileInfo#file_info.size),R};
+ "abbrev" ->
+ Size = integer_to_list(trunc(FileInfo#file_info.size/1024+1))++"k",
+ {ok,Context,ErrorLog,Size,R};
+ Value->
+ {ok,Context,
+ [{internal_info,
+ ?NICE("fsize directive has a spurious tag value ("++
+ Value++")")}|
+ ErrorLog],
+ httpd_util:key1search(Context, errmsg, ""), R}
+ end;
+ {error,Reason} ->
+ {ok,Context,[{internal_info,?NICE("Can't open "++Path)}|ErrorLog],
+ httpd_util:key1search(Context,errmsg,""),R}
+ end.
+
+%%
+%% flastmod directive
+%%
+
+flastmod(Info, Context, ErrorLog, [virtual], [VirtualPath],R) ->
+ Aliases=httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_,Path,AfterPath}=
+ mod_alias:real_name(Info#mod.config_db,VirtualPath,Aliases),
+ flastmod(Info,Context,ErrorLog,R,Path);
+flastmod(Info, Context, ErrorLog, [file], [FileName], R) ->
+ Path = file(Info#mod.config_db, Info#mod.request_uri, FileName),
+ flastmod(Info, Context, ErrorLog, R, Path);
+flastmod(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ {ok,Context,[{internal_info,?NICE("flastmod directive has a spurious tag")}|
+ ErrorLog],httpd_util:key1search(Context,errmsg,""),R}.
+
+flastmod(Info,Context,ErrorLog,R,File) ->
+ case file:read_file_info(File) of
+ {ok,FileInfo} ->
+ {{Yr,Mon,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ Result=
+ io_lib:format("~s ~s ~2w ~w:~w:~w ~w",
+ [httpd_util:day(
+ calendar:day_of_the_week(Yr,Mon, Day)),
+ httpd_util:month(Mon),Day,Hour,Minute,Second, Yr]),
+ {ok,Context,ErrorLog,Result,R};
+ {error,Reason} ->
+ {ok,Context,[{internal_info,?NICE("Can't open "++File)}|ErrorLog],
+ httpd_util:key1search(Context,errmsg,""),R}
+ end.
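+
+%% Illustrative sketch only (file name invented):
+%%
+%%   <!--#fsize file="index.html" -->
+%%
+%% expands to the size of index.html, formatted according to the
+%% sizefmt value in the context ("bytes" or "abbrev"), and
+%%
+%%   <!--#flastmod file="index.html" -->
+%%
+%% expands to its modification time.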
+
+%%
+%% exec directive
+%%
+
+exec(Info,Context,ErrorLog,[cmd],[Command],R) ->
+ ?vtrace("exec cmd:~n Command: ~p",[Command]),
+ cmd(Info,Context,ErrorLog,R,Command);
+exec(Info,Context,ErrorLog,[cgi],[RequestURI],R) ->
+ ?vtrace("exec cgi:~n RequestURI: ~p",[RequestURI]),
+ cgi(Info,Context,ErrorLog,R,RequestURI);
+exec(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ ?vtrace("exec with spurious tag:"
+ "~n TagList: ~p"
+ "~n ValueList: ~p",
+ [TagList,ValueList]),
+ {ok, Context,
+ [{internal_info,?NICE("exec directive has a spurious tag")}|
+ ErrorLog], httpd_util:key1search(Context,errmsg,""),R}.
+
+%% cmd
+
+cmd(Info, Context, ErrorLog, R, Command) ->
+ process_flag(trap_exit,true),
+ Env = env(Info),
+ Dir = filename:dirname(Command),
+ Port = (catch open_port({spawn,Command},[stream,{cd,Dir},{env,Env}])),
+ case Port of
+ P when port(P) ->
+ {NewErrorLog, Result} = proxy(Port, ErrorLog),
+ {ok, Context, NewErrorLog, Result, R};
+ {'EXIT', Reason} ->
+ ?vlog("open port failed: exit"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Info#mod.request_uri,Reason]),
+ exit({open_port_failed,Reason,
+ [{uri,Info#mod.request_uri},{script,Command},
+ {env,Env},{dir,Dir}]});
+ O ->
+ ?vlog("open port failed: unknown result"
+ "~n URI: ~p"
+ "~n O: ~p",
+ [Info#mod.request_uri,O]),
+ exit({open_port_failed,O,
+ [{uri,Info#mod.request_uri},{script,Command},
+ {env,Env},{dir,Dir}]})
+ end.
+
+env(Info) ->
+ [{"DOCUMENT_NAME",document_name(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri)},
+ {"DOCUMENT_URI", document_uri(Info#mod.config_db, Info#mod.request_uri)},
+ {"QUERY_STRING_UNESCAPED", query_string_unescaped(Info#mod.request_uri)},
+ {"DATE_LOCAL", date_local()},
+ {"DATE_GMT", date_gmt()},
+ {"LAST_MODIFIED", last_modified(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri)}
+ ].
+
+%% cgi
+
+cgi(Info, Context, ErrorLog, R, RequestURI) ->
+ ScriptAliases = httpd_util:multi_lookup(Info#mod.config_db, script_alias),
+ case mod_alias:real_script_name(Info#mod.config_db, RequestURI,
+ ScriptAliases) of
+ {Script, AfterScript} ->
+ exec_script(Info,Script,AfterScript,ErrorLog,Context,R);
+ not_a_script ->
+ {ok, Context,
+ [{internal_info, ?NICE(RequestURI++" is not a script")}|
+ ErrorLog], httpd_util:key1search(Context, errmsg, ""),R}
+ end.
+
+remove_header([]) ->
+ [];
+remove_header([$\n,$\n|Rest]) ->
+ Rest;
+remove_header([C|Rest]) ->
+ remove_header(Rest).
+
+
+exec_script(Info,Script,AfterScript,ErrorLog,Context,R) ->
+ process_flag(trap_exit,true),
+ Aliases = httpd_util:multi_lookup(Info#mod.config_db, alias),
+ {_, Path, AfterPath} = mod_alias:real_name(Info#mod.config_db,
+ Info#mod.request_uri,
+ Aliases),
+ Env = env(Info)++mod_cgi:env(Info, Path, AfterPath),
+ Dir = filename:dirname(Path),
+ Port = (catch open_port({spawn,Script},[stream,{env, Env},{cd, Dir}])),
+ case Port of
+ P when port(P) ->
+ %% Send entity body to port.
+ Res = case Info#mod.entity_body of
+ [] ->
+ true;
+ EntityBody ->
+ (catch port_command(Port,EntityBody))
+ end,
+ case Res of
+ {'EXIT', Reason} ->
+ ?vlog("port send failed:"
+ "~n Port: ~p"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Port,Info#mod.request_uri,Reason]),
+ exit({open_cmd_failed,Reason,
+ [{mod,?MODULE},{port,Port},
+ {uri,Info#mod.request_uri},
+ {script,Script},{env,Env},{dir,Dir},
+ {ebody_size,sz(Info#mod.entity_body)}]});
+ true ->
+ {NewErrorLog, Result} = proxy(Port, ErrorLog),
+ {ok, Context, NewErrorLog, remove_header(Result), R}
+ end;
+ {'EXIT', Reason} ->
+ ?vlog("open port failed: exit"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Info#mod.request_uri,Reason]),
+ exit({open_port_failed,Reason,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]});
+ O ->
+ ?vlog("open port failed: unknown result"
+ "~n URI: ~p"
+ "~n O: ~p",
+ [Info#mod.request_uri,O]),
+ exit({open_port_failed,O,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]})
+ end.
+
+
+%%
+%% Port communication
+%%
+
+proxy(Port,ErrorLog) ->
+ process_flag(trap_exit, true),
+ proxy(Port, ErrorLog, []).
+
+proxy(Port, ErrorLog, Result) ->
+ receive
+ {Port, {data, Response}} ->
+ proxy(Port, ErrorLog, lists:append(Result,Response));
+ {'EXIT', Port, normal} when port(Port) ->
+ process_flag(trap_exit, false),
+ {ErrorLog, Result};
+ {'EXIT', Port, Reason} when port(Port) ->
+ process_flag(trap_exit, false),
+ {[{internal_info,
+ ?NICE("Scrambled output from CGI-script")}|ErrorLog],
+ Result};
+ {'EXIT', Pid, Reason} when pid(Pid) ->
+ process_flag(trap_exit, false),
+ {'EXIT', Pid, Reason};
+ %% This should not happen!
+ WhatEver ->
+ process_flag(trap_exit, false),
+ {ErrorLog, Result}
+ end.
+
+
+%% ------
+%% Temporary until I figure out a way to fix send_in_chunks
+%% (comments and directives that start in one chunk but end
+%% in another are not handled).
+%%
+
+send_in(Info, Path,Head, {ok,FileInfo}) ->
+ case file:read_file(Path) of
+ {ok, Bin} ->
+ send_in1(Info, binary_to_list(Bin), Head, FileInfo);
+ {error, Reason} ->
+ ?vlog("failed reading file: ~p",[Reason]),
+ {error, {open,Reason}}
+ end;
+send_in(Info,Path,Head,{error,Reason}) ->
+ ?vlog("failed open file: ~p",[Reason]),
+ {error, {open,Reason}}.
+
+send_in1(Info, Data,Head,FileInfo) ->
+ {ok, _Context, Err, ParsedBody} = parse(Info,Data,?DEFAULT_CONTEXT,[],[]),
+ Size = length(ParsedBody),
+ ?vdebug("send_in1 -> Size: ~p",[Size]),
+ Head1 = case Info#mod.http_version of
+ "HTTP/1.1"->
+ Head ++
+ "Content-Length: " ++
+ integer_to_list(Size) ++
+ "\r\nEtag:" ++
+ httpd_util:create_etag(FileInfo,Size) ++"\r\n" ++
+ "Last-Modified: " ++
+ httpd_util:rfc1123_date(FileInfo#file_info.mtime) ++
+ "\r\n\r\n";
+ _->
+ %% i.e http/1.0 and http/0.9
+ Head ++
+ "Content-Length: " ++
+ integer_to_list(Size) ++
+ "\r\nLast-Modified: " ++
+ httpd_util:rfc1123_date(FileInfo#file_info.mtime) ++
+ "\r\n\r\n"
+ end,
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [Head1,ParsedBody]),
+ {ok, Err, Size}.
+
+
+
+%%
+%% Addition to "Fuzzy" HTML parser. This is actually an ugly hack to
+%% avoid putting too much data on the heap. To be rewritten...
+%%
+
+% -define(CHUNK_SIZE, 4096).
+
+% send_in_chunks(Info, Path) ->
+% ?DEBUG("send_in_chunks -> Path: ~p",[Path]),
+% case file:open(Path, [read, raw]) of
+% {ok, Stream} ->
+% send_in_chunks(Info, Stream, ?DEFAULT_CONTEXT,[]);
+% {error, Reason} ->
+% ?ERROR("Failed open file: ~p",[Reason]),
+% {error, {open,Reason}}
+% end.
+
+% send_in_chunks(Info, Stream, Context, ErrorLog) ->
+% case file:read(Stream, ?CHUNK_SIZE) of
+% {ok, Data} ->
+% ?DEBUG("send_in_chunks -> read ~p bytes",[length(Data)]),
+% {ok, NewContext, NewErrorLog, ParsedBody}=
+% parse(Info, Data, Context, ErrorLog, []),
+% httpd_socket:deliver(Info#mod.socket_type,
+% Info#mod.socket, ParsedBody),
+% send_in_chunks(Info,Stream,NewContext,NewErrorLog);
+% eof ->
+% {ok, ErrorLog};
+% {error, Reason} ->
+% ?ERROR("Failed read from file: ~p",[Reason]),
+% {error, {read,Reason}}
+% end.
+
+
+%%
+%% "Fuzzy" HTML parser
+%%
+
+parse(Info,Body) ->
+ parse(Info, Body, ?DEFAULT_CONTEXT, [], []).
+
+parse(Info, [], Context, ErrorLog, Result) ->
+ {ok, Context, lists:reverse(ErrorLog), lists:reverse(Result)};
+parse(Info,[$<,$!,$-,$-,$#|R1],Context,ErrorLog,Result) ->
+ ?DEBUG("parse -> start command directive when length(R1): ~p",[length(R1)]),
+ case catch parse0(R1,Context) of
+ {parse_error,Reason} ->
+ parse(Info,R1,Context,[{internal_info,?NICE(Reason)}|ErrorLog],
+ [$#,$-,$-,$!,$<|Result]);
+ {ok,Context,Command,TagList,ValueList,R2} ->
+ ?DEBUG("parse -> Command: ~p",[Command]),
+ {ok,NewContext,NewErrorLog,MoreResult,R3}=
+ handle(Info,Context,ErrorLog,Command,TagList,ValueList,R2),
+ parse(Info,R3,NewContext,NewErrorLog,lists:reverse(MoreResult)++Result)
+ end;
+parse(Info,[$<,$!,$-,$-|R1],Context,ErrorLog,Result) ->
+ ?DEBUG("parse -> start comment when length(R1) = ~p",[length(R1)]),
+ case catch parse5(R1,[],0) of
+ {parse_error,Reason} ->
+ ?ERROR("parse -> parse error: ~p",[Reason]),
+ parse(Info,R1,Context,[{internal_info,?NICE(Reason)}|ErrorLog],Result);
+ {Comment,R2} ->
+ ?DEBUG("parse -> length(Comment) = ~p, length(R2) = ~p",
+ [length(Comment),length(R2)]),
+ parse(Info,R2,Context,ErrorLog,Comment++Result)
+ end;
+parse(Info,[C|R],Context,ErrorLog,Result) ->
+ parse(Info,R,Context,ErrorLog,[C|Result]).
+
+handle(Info,Context,ErrorLog,Command,TagList,ValueList,R) ->
+ case catch apply(?MODULE,Command,[Info,Context,ErrorLog,TagList,ValueList,
+ R]) of
+ {'EXIT',{undef,_}} ->
+ throw({parse_error,"Unknown command "++atom_to_list(Command)++
+ " in parsed doc"});
+ Result ->
+ Result
+ end.
+
+parse0([],Context) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse0([$-,$-,$>|R],Context) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse0([$ |R],Context) ->
+ parse0(R,Context);
+parse0(String,Context) ->
+ parse1(String,Context,"").
+
+parse1([],Context,Command) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse1([$-,$-,$>|R],Context,Command) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse1([$ |R],Context,Command) ->
+ parse2(R,Context,list_to_atom(lists:reverse(Command)),[],[],"");
+parse1([C|R],Context,Command) ->
+ parse1(R,Context,[C|Command]).
+
+parse2([],Context,Command,TagList,ValueList,Tag) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse2([$-,$-,$>|R],Context,Command,TagList,ValueList,Tag) ->
+ {ok,Context,Command,TagList,ValueList,R};
+parse2([$ |R],Context,Command,TagList,ValueList,Tag) ->
+ parse2(R,Context,Command,TagList,ValueList,Tag);
+parse2([$=|R],Context,Command,TagList,ValueList,Tag) ->
+ parse3(R,Context,Command,[list_to_atom(lists:reverse(Tag))|TagList],
+ ValueList);
+parse2([C|R],Context,Command,TagList,ValueList,Tag) ->
+ parse2(R,Context,Command,TagList,ValueList,[C|Tag]).
+
+parse3([],Context,Command,TagList,ValueList) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse3([$-,$-,$>|R],Context,Command,TagList,ValueList) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse3([$ |R],Context,Command,TagList,ValueList) ->
+ parse3(R,Context,Command,TagList,ValueList);
+parse3([$"|R],Context,Command,TagList,ValueList) ->
+ parse4(R,Context,Command,TagList,ValueList,"");
+parse3(String,Context,Command,TagList,ValueList) ->
+ throw({parse_error,"Premature EOF in parsed file"}).
+
+parse4([],Context,Command,TagList,ValueList,Value) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse4([$-,$-,$>|R],Context,Command,TagList,ValueList,Value) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse4([$"|R],Context,Command,TagList,ValueList,Value) ->
+ parse2(R,Context,Command,TagList,[lists:reverse(Value)|ValueList],"");
+parse4([C|R],Context,Command,TagList,ValueList,Value) ->
+ parse4(R,Context,Command,TagList,ValueList,[C|Value]).
+
+parse5([],Comment,Depth) ->
+ ?ERROR("parse5 -> unterminated comment of ~p bytes when Depth = ~p",
+ [length(Comment),Depth]),
+ throw({parse_error,"Premature EOF in parsed file"});
+parse5([$<,$!,$-,$-|R],Comment,Depth) ->
+ parse5(R,[$-,$-,$!,$<|Comment],Depth+1);
+parse5([$-,$-,$>|R],Comment,0) ->
+ {">--"++Comment++"--!<",R};
+parse5([$-,$-,$>|R],Comment,Depth) ->
+ parse5(R,[$>,$-,$-|Comment],Depth-1);
+parse5([C|R],Comment,Depth) ->
+ parse5(R,[C|Comment],Depth).
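+
+%% Illustrative sketch only: for a body like
+%%
+%%   Generated <!--#echo var="DATE_GMT" --> by <!-- just a comment -->
+%%
+%% parse/5 copies the plain text and the ordinary comment through
+%% unchanged, while the <!--# directive is tokenised by parse0..parse4
+%% and dispatched via handle/7 to echo/6; an unknown command makes
+%% handle/7 throw a parse_error.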
+
+
+sz(B) when binary(B) -> {binary,size(B)};
+sz(L) when list(L) -> {list,length(L)};
+sz(_) -> undefined.
+
+
+%% send_error - Handle failure to send the file
+%%
+send_error({open,Reason},Info,Path) -> open_error(Reason,Info,Path);
+send_error({read,Reason},Info,Path) -> read_error(Reason,Info,Path).
+
+
+%% open_error - Handle file open failure
+%%
+open_error(eacces,Info,Path) ->
+ open_error(403,Info,Path,"");
+open_error(enoent,Info,Path) ->
+ open_error(404,Info,Path,"");
+open_error(enotdir,Info,Path) ->
+ open_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+open_error(emfile,_Info,Path) ->
+    open_error(500,none,Path,": Too many open files");
+open_error({enfile,_},_Info,Path) ->
+ open_error(500,none,Path,": File table overflow");
+open_error(_Reason,_Info,Path) ->
+ open_error(500,none,Path,"").
+
+open_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't open "++Path++Reason)};
+open_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,?NICE("Can't open "++Path++Reason)}.
+
+read_error(_Reason,_Info,Path) ->
+ read_error(500,none,Path,"").
+
+read_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't read "++Path++Reason)};
+read_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,?NICE("Can't read "++Path++Reason)}.
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_log.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_log.erl
new file mode 100644
index 0000000000..29fa2cfd11
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_log.erl
@@ -0,0 +1,250 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_log.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_log).
+-export([do/1,error_log/5,security_log/2,load/2,store/2,remove/1]).
+
+-export([report_error/2]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"LOG").
+-include("httpd_verbosity.hrl").
+
+%% do
+
+do(Info) ->
+ AuthUser = auth_user(Info#mod.data),
+ Date = custom_date(),
+ log_internal_info(Info,Date,Info#mod.data),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ transfer_log(Info,"-",AuthUser,Date,StatusCode,0),
+ if
+ StatusCode >= 400 ->
+ error_log(Info,Date,Reason);
+ true ->
+ not_an_error
+ end,
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ {already_sent,StatusCode,Size} ->
+ transfer_log(Info,"-",AuthUser,Date,StatusCode,Size),
+ {proceed,Info#mod.data};
+ {response,Head,Body} ->
+ Size=httpd_util:key1search(Head,content_length,unknown),
+ Code=httpd_util:key1search(Head,code,unknown),
+ transfer_log(Info,"-",AuthUser,Date,Code,Size),
+ {proceed,Info#mod.data};
+ {StatusCode,Response} ->
+ transfer_log(Info,"-",AuthUser,Date,200,
+ httpd_util:flatlength(Response)),
+ {proceed,Info#mod.data};
+ undefined ->
+ transfer_log(Info,"-",AuthUser,Date,200,0),
+ {proceed,Info#mod.data}
+ end
+ end.
+
+custom_date() ->
+ LocalTime=calendar:local_time(),
+ UniversalTime=calendar:universal_time(),
+ Minutes=round(diff_in_minutes(LocalTime,UniversalTime)),
+ {{YYYY,MM,DD},{Hour,Min,Sec}}=LocalTime,
+ Date =
+ io_lib:format("~.2.0w/~.3s/~.4w:~.2.0w:~.2.0w:~.2.0w ~c~.2.0w~.2.0w",
+ [DD, httpd_util:month(MM), YYYY, Hour, Min, Sec,
+ sign(Minutes),
+ abs(Minutes) div 60, abs(Minutes) rem 60]),
+ lists:flatten(Date).
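+%% For illustration, the format above yields a Common Log Format style
+%% timestamp such as "09/Mar/2011:13:29:48 +0100" (local time plus UTC offset).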
+
+diff_in_minutes(L,U) ->
+ (calendar:datetime_to_gregorian_seconds(L) -
+ calendar:datetime_to_gregorian_seconds(U))/60.
+
+sign(Minutes) when Minutes > 0 ->
+ $+;
+sign(Minutes) ->
+ $-.
+
+auth_user(Data) ->
+ case httpd_util:key1search(Data,remote_user) of
+ undefined ->
+ "-";
+ RemoteUser ->
+ RemoteUser
+ end.
+
+%% log_internal_info
+
+log_internal_info(Info,Date,[]) ->
+ ok;
+log_internal_info(Info,Date,[{internal_info,Reason}|Rest]) ->
+ error_log(Info,Date,Reason),
+ log_internal_info(Info,Date,Rest);
+log_internal_info(Info,Date,[_|Rest]) ->
+ log_internal_info(Info,Date,Rest).
+
+%% transfer_log
+
+transfer_log(Info,RFC931,AuthUser,Date,StatusCode,Bytes) ->
+ case httpd_util:lookup(Info#mod.config_db,transfer_log) of
+ undefined ->
+ no_transfer_log;
+ TransferLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ case (catch io:format(TransferLog, "~s ~s ~s [~s] \"~s\" ~w ~w~n",
+ [RemoteHost, RFC931, AuthUser,
+ Date, Info#mod.request_line,
+ StatusCode, Bytes])) of
+ ok ->
+ ok;
+ Error ->
+ error_logger:error_report(Error)
+ end
+ end.
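+%% For illustration, a transfer log entry written above follows the
+%% Common Log Format, roughly:
+%%   127.0.0.1 - - [09/Mar/2011:13:29:48 +0100] "GET /index.html HTTP/1.0" 200 1234
+%% (remote host, rfc931, auth user, date, request line, status code, bytes).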
+
+%% security log
+
+security_log(Info, Reason) ->
+ case httpd_util:lookup(Info#mod.config_db, security_log) of
+ undefined ->
+ no_security_log;
+ SecurityLog ->
+ io:format(SecurityLog,"[~s] ~s~n", [custom_date(), Reason])
+ end.
+
+%% error_log
+
+error_log(Info,Date,Reason) ->
+ case httpd_util:lookup(Info#mod.config_db, error_log) of
+ undefined ->
+ no_error_log;
+ ErrorLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ io:format(ErrorLog,"[~s] access to ~s failed for ~s, reason: ~p~n",
+ [Date,Info#mod.request_uri,RemoteHost,Reason])
+ end.
+
+error_log(SocketType,Socket,ConfigDB,{PortNumber,RemoteHost},Reason) ->
+ case httpd_util:lookup(ConfigDB,error_log) of
+ undefined ->
+ no_error_log;
+ ErrorLog ->
+ Date=custom_date(),
+ io:format(ErrorLog,"[~s] server crash for ~s, reason: ~p~n",
+ [Date,RemoteHost,Reason]),
+ ok
+ end.
+
+report_error(ConfigDB,Error) ->
+ case httpd_util:lookup(ConfigDB,error_log) of
+ undefined ->
+ no_error_log;
+ ErrorLog ->
+ Date=custom_date(),
+ io:format(ErrorLog,"[~s] reporting error: ~s~n",[Date,Error]),
+ ok
+ end.
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$T,$r,$a,$n,$s,$f,$e,$r,$L,$o,$g,$ |TransferLog],[]) ->
+ {ok,[],{transfer_log,httpd_conf:clean(TransferLog)}};
+load([$E,$r,$r,$o,$r,$L,$o,$g,$ |ErrorLog],[]) ->
+ {ok,[],{error_log,httpd_conf:clean(ErrorLog)}};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$L,$o,$g,$ |SecurityLog], []) ->
+ {ok, [], {security_log, httpd_conf:clean(SecurityLog)}}.
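+%% For illustration, assuming httpd_conf:clean/1 strips surrounding whitespace,
+%% a config directive is parsed roughly as:
+%%   load("TransferLog logs/access_log", []) ->
+%%       {ok,[],{transfer_log,"logs/access_log"}}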
+
+%% store
+
+store({transfer_log,TransferLog},ConfigList) ->
+ case create_log(TransferLog,ConfigList) of
+ {ok,TransferLogStream} ->
+ {ok,{transfer_log,TransferLogStream}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({error_log,ErrorLog},ConfigList) ->
+ case create_log(ErrorLog,ConfigList) of
+ {ok,ErrorLogStream} ->
+ {ok,{error_log,ErrorLogStream}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({security_log, SecurityLog},ConfigList) ->
+ case create_log(SecurityLog, ConfigList) of
+ {ok, SecurityLogStream} ->
+ {ok, {security_log, SecurityLogStream}};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+create_log(LogFile,ConfigList) ->
+ Filename = httpd_conf:clean(LogFile),
+ case filename:pathtype(Filename) of
+ absolute ->
+ case file:open(Filename, [read,write]) of
+ {ok,LogStream} ->
+ file:position(LogStream,{eof,0}),
+ {ok,LogStream};
+ {error,_} ->
+ {error,?NICE("Can't create "++Filename)}
+ end;
+ volumerelative ->
+ case file:open(Filename, [read,write]) of
+ {ok,LogStream} ->
+ file:position(LogStream,{eof,0}),
+ {ok,LogStream};
+ {error,_} ->
+ {error,?NICE("Can't create "++Filename)}
+ end;
+ relative ->
+ case httpd_util:key1search(ConfigList,server_root) of
+ undefined ->
+ {error,
+ ?NICE(Filename++
+		   " is an invalid logfile name because ServerRoot is not defined")};
+ ServerRoot ->
+ AbsoluteFilename=filename:join(ServerRoot,Filename),
+ case file:open(AbsoluteFilename, [read,write]) of
+ {ok,LogStream} ->
+ file:position(LogStream,{eof,0}),
+ {ok,LogStream};
+ {error,Reason} ->
+ {error,?NICE("Can't create "++AbsoluteFilename)}
+ end
+ end
+ end.
+
+%% remove
+
+remove(ConfigDB) ->
+ lists:foreach(fun([Stream]) -> file:close(Stream) end,
+ ets:match(ConfigDB,{transfer_log,'$1'})),
+ lists:foreach(fun([Stream]) -> file:close(Stream) end,
+ ets:match(ConfigDB,{error_log,'$1'})),
+ lists:foreach(fun([Stream]) -> file:close(Stream) end,
+ ets:match(ConfigDB,{security_log,'$1'})),
+ ok.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_range.erl
new file mode 100644
index 0000000000..0728bd2d91
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_range.erl
@@ -0,0 +1,397 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_range.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_range).
+-export([do/1]).
+-include("httpd.hrl").
+
+%% do
+
+
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.parsed_header,"range") of
+ undefined ->
+ %Not a range response
+ {proceed,Info#mod.data};
+ Range ->
+			  %%Check that there was no if-range field that stopped
+			  %%the range request in favour of sending the whole file
+ case httpd_util:key1search(Info#mod.data,if_range) of
+ send_file ->
+ {proceed,Info#mod.data};
+ _undefined ->
+ do_get_range(Info,Range)
+ end
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_get_range(Info,Ranges) ->
+ ?DEBUG("do_get_range -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri),
+ {FileInfo, LastModified} =get_modification_date(Path),
+ send_range_response(Path,Info,Ranges,FileInfo,LastModified).
+
+
+send_range_response(Path,Info,Ranges,FileInfo,LastModified)->
+ case parse_ranges(Ranges) of
+ error->
+ ?ERROR("send_range_response-> Unparsable range request",[]),
+ {proceed,Info#mod.data};
+ {multipart,RangeList}->
+ send_multi_range_response(Path,Info,RangeList);
+ {Start,Stop}->
+ send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)
+ end.
+%%More than one range was specified
+%%Send a multipart response to the user
+%
+%%An example of a multipart range response
+
+% HTTP/1.1 206 Partial Content
+% Date:Wed 15 Nov 1995 04:08:23 GMT
+% Last-modified:Wed 14 Nov 1995 04:08:23 GMT
+% Content-type: multipart/byteranges; boundary="SeparatorString"
+%
+% --"SeparatorString"
+% Content-Type: application/pdf
+% Content-Range: bytes 500-600/1010
+% .... The data..... 101 bytes
+%
+% --"SeparatorString"
+% Content-Type: application/pdf
+% Content-Range: bytes 700-1009/1010
+% .... The data.....
+
+
+
+send_multi_range_response(Path,Info,RangeList)->
+ case file:open(Path, [raw,binary]) of
+ {ok, FileDescriptor} ->
+ file:close(FileDescriptor),
+ ?DEBUG("send_multi_range_response -> FileDescriptor: ~p",[FileDescriptor]),
+ Suffix = httpd_util:suffix(Path),
+ PartMimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Date = httpd_util:rfc1123_date(),
+ {FileInfo,LastModified}=get_modification_date(Path),
+ case valid_ranges(RangeList,Path,FileInfo) of
+ {ValidRanges,true}->
+ ?DEBUG("send_multi_range_response -> Ranges are valid:",[]),
+ %Apache breaks the standard by sending the size field in the Header.
+ Header = [{code,206},
+ {content_type,"multipart/byteranges;boundary=RangeBoundarySeparator"},
+ {etag,httpd_util:create_etag(FileInfo)},
+ {last_modified,LastModified}
+ ],
+		    ?DEBUG("send_multi_range_response -> Valid Ranges: ~p",[RangeList]),
+ Body={fun send_multiranges/4,[ValidRanges,Info,PartMimeType,Path]},
+ {proceed,[{response,{response,Header,Body}}|Info#mod.data]};
+ _ ->
+ {proceed, [{status, {416,"Range not valid",bad_range_boundaries }}]}
+ end;
+ {error, Reason} ->
+ ?ERROR("do_get -> failed open file: ~p",[Reason]),
+ {proceed,Info#mod.data}
+ end.
+
+send_multiranges(ValidRanges,Info,PartMimeType,Path)->
+ ?DEBUG("send_multiranges -> Start sending the ranges",[]),
+ case file:open(Path, [raw,binary]) of
+ {ok,FileDescriptor} ->
+ lists:foreach(fun(Range)->
+ send_multipart_start(Range,Info,PartMimeType,FileDescriptor)
+ end,ValidRanges),
+ file:close(FileDescriptor),
+ %%Sends an end of the multipart
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,"\r\n--RangeBoundarySeparator--"),
+ sent;
+ _ ->
+ close
+ end.
+
+send_multipart_start({{Start,End},{StartByte,EndByte,Size}},Info,PartMimeType,FileDescriptor)when StartByte<Size->
+ PartHeader=["\r\n--RangeBoundarySeparator\r\n","Content-type: ",PartMimeType,"\r\n",
+ "Content-Range:bytes=",integer_to_list(StartByte),"-",integer_to_list(EndByte),"/",
+ integer_to_list(Size),"\r\n\r\n"],
+ send_part_start(Info#mod.socket_type,Info#mod.socket,PartHeader,FileDescriptor,Start,End);
+
+
+send_multipart_start({{Start,End},{StartByte,EndByte,Size}},Info,PartMimeType,FileDescriptor)->
+ PartHeader=["\r\n--RangeBoundarySeparator\r\n","Content-type: ",PartMimeType,"\r\n",
+ "Content-Range:bytes=",integer_to_list(Size-(StartByte-Size)),"-",integer_to_list(EndByte),"/",
+ integer_to_list(Size),"\r\n\r\n"],
+ send_part_start(Info#mod.socket_type,Info#mod.socket,PartHeader,FileDescriptor,Start,End).
+
+send_part_start(SocketType,Socket,PartHeader,FileDescriptor,Start,End)->
+ case httpd_socket:deliver(SocketType,Socket,PartHeader) of
+ ok ->
+ send_part_start(SocketType,Socket,FileDescriptor,Start,End);
+ _ ->
+ close
+ end.
+
+send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)->
+ case file:open(Path, [raw,binary]) of
+ {ok, FileDescriptor} ->
+ file:close(FileDescriptor),
+ ?DEBUG("send_range_response -> FileDescriptor: ~p",[FileDescriptor]),
+ Suffix = httpd_util:suffix(Path),
+ MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Date = httpd_util:rfc1123_date(),
+ Size = get_range_size(Start,Stop,FileInfo),
+ case valid_range(Start,Stop,FileInfo) of
+ {true,StartByte,EndByte,TotByte}->
+ Head=[{code,206},{content_type, MimeType},
+ {last_modified, LastModified},
+ {etag,httpd_util:create_etag(FileInfo)},
+ {content_range,["bytes=",integer_to_list(StartByte),"-",
+ integer_to_list(EndByte),"/",integer_to_list(TotByte)]},
+ {content_length,Size}],
+ BodyFunc=fun send_range_body/5,
+ Arg=[Info#mod.socket_type, Info#mod.socket,Path,Start,Stop],
+ {proceed,[{response,{response,Head,{BodyFunc,Arg}}}|Info#mod.data]};
+ {false,Reason} ->
+ {proceed, [{status, {416,Reason,bad_range_boundaries }}]}
+ end;
+ {error, Reason} ->
+ ?ERROR("send_range_response -> failed open file: ~p",[Reason]),
+ {proceed,Info#mod.data}
+ end.
+
+
+send_range_body(SocketType,Socket,Path,Start,End) ->
+ ?DEBUG("mod_range -> send_range_body",[]),
+ case file:open(Path, [raw,binary]) of
+ {ok,FileDescriptor} ->
+ send_part_start(SocketType,Socket,FileDescriptor,Start,End),
+ file:close(FileDescriptor);
+ _ ->
+ close
+ end.
+
+send_part_start(SocketType,Socket,FileDescriptor,Start,End) ->
+ case Start of
+ from_end ->
+ file:position(FileDescriptor,{eof,End}),
+ send_body(SocketType,Socket,FileDescriptor);
+ from_start ->
+ file:position(FileDescriptor,{bof,End}),
+ send_body(SocketType,Socket,FileDescriptor);
+ Byte when integer(Byte) ->
+ file:position(FileDescriptor,{bof,Start}),
+ send_part(SocketType,Socket,FileDescriptor,End)
+ end,
+ sent.
+
+
+%%This function could replace send_body by calling it with Start=0 and End=FileSize,
+%% but I guess that would be unwise from a performance point of view
+send_part(SocketType,Socket,FileDescriptor,End)->
+ case file:position(FileDescriptor,{cur,0}) of
+ {ok,NewPos} ->
+ if
+ NewPos > End ->
+ ok;
+ true ->
+ Size=get_file_chunk_size(NewPos,End,?FILE_CHUNK_SIZE),
+ case file:read(FileDescriptor,Size) of
+ eof ->
+ ok;
+ {error,Reason} ->
+ ok;
+ {ok,Binary} ->
+ case httpd_socket:deliver(SocketType,Socket,Binary) of
+ socket_closed ->
+ ?LOG("send_range of body -> socket closed while sending",[]),
+ socket_close;
+ _ ->
+ send_part(SocketType,Socket,FileDescriptor,End)
+ end
+ end
+ end;
+ _->
+ ok
+ end.
+
+%% Validate that the range is within the limits of the file
+valid_ranges(RangeList,Path,FileInfo)->
+ lists:mapfoldl(fun({Start,End},Acc)->
+ case Acc of
+ true ->
+ case valid_range(Start,End,FileInfo) of
+ {true,StartB,EndB,Size}->
+ {{{Start,End},{StartB,EndB,Size}},true};
+ _ ->
+ false
+ end;
+ _ ->
+ {false,false}
+ end
+ end,true,RangeList).
+
+
+
+valid_range(from_end,End,FileInfo)->
+ Size=FileInfo#file_info.size,
+ if
+ End < Size ->
+ {true,(Size+End),Size-1,Size};
+ true ->
+ false
+ end;
+valid_range(from_start,End,FileInfo)->
+ Size=FileInfo#file_info.size,
+ if
+ End < Size ->
+ {true,End,Size-1,Size};
+ true ->
+ false
+ end;
+
+valid_range(Start,End,FileInfo)when Start=<End->
+ case FileInfo#file_info.size of
+ FileSize when Start< FileSize ->
+ case FileInfo#file_info.size of
+ Size when End<Size ->
+ {true,Start,End,FileInfo#file_info.size};
+ Size ->
+ {true,Start,Size-1,Size}
+ end;
+ _->
+ {false,"The size of the range is negative"}
+ end;
+
+valid_range(Start,End,FileInfo)->
+ {false,"Range starts out of file boundaries"}.
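+%% Illustrative examples, assuming a 1000 byte file (#file_info{size=1000}):
+%%   valid_range(0, 499, FileInfo)          -> {true,0,499,1000}
+%%   valid_range(from_start, 500, FileInfo) -> {true,500,999,1000}
+%%   valid_range(from_end, -200, FileInfo)  -> {true,800,999,1000}
+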
+%% Find the modification date of the file
+get_modification_date(Path)->
+ case file:read_file_info(Path) of
+ {ok, FileInfo0} ->
+ {FileInfo0, httpd_util:rfc1123_date(FileInfo0#file_info.mtime)};
+ _ ->
+ {#file_info{},""}
+ end.
+
+%Calculate the size of the chunk to read
+
+get_file_chunk_size(Position,End,DefaultChunkSize)when (Position+DefaultChunkSize) =< End->
+ DefaultChunkSize;
+get_file_chunk_size(Position,End,DefaultChunkSize)->
+ (End-Position) +1.
+
+
+
+%Get the size of the range to send. Remember that
+%a range runs from startbyte up to endbyte, which means that
+%the number of bytes in a range is (EndByte-StartByte)+1
+
+get_range_size(from_end,Stop,FileInfo)->
+ integer_to_list(-1*Stop);
+
+get_range_size(from_start,StartByte,FileInfo) ->
+ integer_to_list((((FileInfo#file_info.size)-StartByte)));
+
+get_range_size(StartByte,EndByte,FileInfo) ->
+ integer_to_list((EndByte-StartByte)+1).
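+%% Illustrative examples (the size is returned as a string; the from_start
+%% case assumes a 1000 byte file):
+%%   get_range_size(0, 499, FileInfo)          -> "500"
+%%   get_range_size(from_start, 400, FileInfo) -> "600"
+%%   get_range_size(from_end, -500, FileInfo)  -> "500"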
+
+parse_ranges([$\ ,$b,$y,$t,$e,$s,$\=|Ranges])->
+ parse_ranges([$b,$y,$t,$e,$s,$\=|Ranges]);
+parse_ranges([$b,$y,$t,$e,$s,$\=|Ranges])->
+ case string:tokens(Ranges,", ") of
+ [Range] ->
+ parse_range(Range);
+ [Range1|SplittedRanges]->
+ {multipart,lists:map(fun parse_range/1,[Range1|SplittedRanges])}
+ end;
+%Bad unit
+parse_ranges(Ranges)->
+ io:format("Bad Ranges : ~p",[Ranges]),
+ error.
+%Parse the range specification from the request into {Start,End}
+%Start, End : Numeric string | []
+
+parse_range(Range)->
+ format_range(split_range(Range,[],[])).
+format_range({[],BytesFromEnd})->
+ {from_end,-1*(list_to_integer(BytesFromEnd))};
+format_range({StartByte,[]})->
+ {from_start,list_to_integer(StartByte)};
+format_range({StartByte,EndByte})->
+ {list_to_integer(StartByte),list_to_integer(EndByte)}.
+%The last case returns the split range
+split_range([],Current,Other)->
+ {lists:reverse(Other),lists:reverse(Current)};
+
+split_range([$-|Rest],Current,Other)->
+ split_range(Rest,Other,Current);
+
+split_range([N|Rest],Current,End) ->
+ split_range(Rest,[N|Current],End).
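+%% Illustrative examples, assuming well-formed Range header values:
+%%   parse_ranges("bytes=0-499")        -> {0,499}
+%%   parse_ranges("bytes=500-")         -> {from_start,500}
+%%   parse_ranges("bytes=-500")         -> {from_end,-500}
+%%   parse_ranges("bytes=0-99,200-299") -> {multipart,[{0,99},{200,299}]}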
+
+send_body(SocketType,Socket,FileDescriptor) ->
+ case file:read(FileDescriptor,?FILE_CHUNK_SIZE) of
+ {ok,Binary} ->
+ ?DEBUG("send_body -> send another chunk: ~p",[size(Binary)]),
+ case httpd_socket:deliver(SocketType,Socket,Binary) of
+ socket_closed ->
+ ?LOG("send_body -> socket closed while sending",[]),
+ socket_close;
+ _ ->
+ send_body(SocketType,Socket,FileDescriptor)
+ end;
+ eof ->
+ ?DEBUG("send_body -> done with this file",[]),
+ eof
+ end.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_responsecontrol.erl
new file mode 100644
index 0000000000..c946098120
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_responsecontrol.erl
@@ -0,0 +1,337 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_responsecontrol.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+
+-module(mod_responsecontrol).
+-export([do/1]).
+
+-include("httpd.hrl").
+
+
+do(Info) ->
+ ?DEBUG("do -> response_control",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ case do_responsecontrol(Info) of
+ continue ->
+ {proceed,Info#mod.data};
+ Response ->
+ {proceed,[Response|Info#mod.data]}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+
+%%----------------------------------------------------------------------
+%%Check that the request headers do not contain any restrictions on
+%%whether a response shall be created or not
+%%----------------------------------------------------------------------
+
+do_responsecontrol(Info) ->
+ ?DEBUG("do_response_control -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri),
+ case file:read_file_info(Path) of
+ {ok, FileInfo} ->
+ control(Path,Info,FileInfo);
+ _ ->
+	    %% The requested asset is not a plain file, so it must
+	    %% be generated every time it is requested
+ continue
+ end.
+
+%%----------------------------------------------------------------------
+%%Control the If-Match, If-None-Match, and If-Modified-Since
+%%----------------------------------------------------------------------
+
+
+%% If a client sends more than one of the if-XXXX fields in a request,
+%% the standard does not specify the behaviour, so I specified it :-)
+%% The priority between the fields is
+%% 1.If-modified
+%% 2.If-Unmodified
+%% 3.If-Match
+%% 4.If-Nomatch
+
+%% This means if more than one of the fields are in the request the
+%% field with highest priority will be used
+
+%%If the request is a range request the If-Range field will be the winner.
+
+control(Path,Info,FileInfo)->
+ case control_range(Path,Info,FileInfo) of
+ undefined ->
+ case control_Etag(Path,Info,FileInfo) of
+ undefined ->
+ case control_modification(Path,Info,FileInfo) of
+ continue ->
+ continue;
+ ReturnValue ->
+ send_return_value(ReturnValue,FileInfo)
+ end;
+ continue ->
+ continue;
+ ReturnValue ->
+ send_return_value(ReturnValue,FileInfo)
+ end;
+ Response->
+ Response
+ end.
+
+%%----------------------------------------------------------------------
+%%If there are both a range and an if-range field, check the if-range condition
+%%----------------------------------------------------------------------
+control_range(Path,Info,FileInfo) ->
+ case httpd_util:key1search(Info#mod.parsed_header,"range") of
+ undefined->
+ undefined;
+ _Range ->
+ case httpd_util:key1search(Info#mod.parsed_header,"if-range") of
+ undefined ->
+ undefined;
+ EtagOrDate ->
+ control_if_range(Path,Info,FileInfo,EtagOrDate)
+ end
+ end.
+
+control_if_range(Path,Info,FileInfo,EtagOrDate) ->
+ case httpd_util:convert_request_date(strip_date(EtagOrDate)) of
+ bad_date ->
+ FileEtag=httpd_util:create_etag(FileInfo),
+ case FileEtag of
+ EtagOrDate ->
+ continue;
+ _ ->
+ {if_range,send_file}
+ end;
+ ErlDate ->
+	    %%We got a date in the request; if the file has been modified
+	    %%since that date the whole file is sent instead of the range
+ case control_modification_data(Info,FileInfo#file_info.mtime,"if-range") of
+ modified ->
+ {if_range,send_file};
+ _UnmodifiedOrUndefined->
+ continue
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Check the values of the If-Match and If-None-Match header fields
+%%----------------------------------------------------------------------
+control_Etag(Path,Info,FileInfo)->
+ FileEtag=httpd_util:create_etag(FileInfo),
+    %%Check whether the Etag for the resource matches one of the Etags in
+    %%the if-match header field
+ case control_match(Info,FileInfo,"if-match",FileEtag) of
+ nomatch ->
+	    %%None of the Etags in the if-match field matched the current
+	    %%Etag for the resource, so return a 412
+ {412,Info,Path};
+ match ->
+ continue;
+ undefined ->
+ case control_match(Info,FileInfo,"if-none-match",FileEtag) of
+ nomatch ->
+ continue;
+ match ->
+ case Info#mod.method of
+ "GET" ->
+ {304,Info,Path};
+ "HEAD" ->
+ {304,Info,Path};
+ _OtherrequestMethod ->
+ {412,Info,Path}
+ end;
+ undefined ->
+ undefined
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Check whether there are any Etags for HeaderField in the request; if so,
+%%check whether they match the Etag of the requested file
+%%----------------------------------------------------------------------
+control_match(Info,FileInfo,HeaderField,FileEtag)->
+ case split_etags(httpd_util:key1search(Info#mod.parsed_header,HeaderField)) of
+ undefined->
+ undefined;
+ Etags->
+	    %%Check whether the match-any star ("*") is present
+ case lists:member("*",Etags) of
+ true->
+ match;
+ false->
+ compare_etags(FileEtag,Etags)
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Split the etags from the request
+%%----------------------------------------------------------------------
+split_etags(undefined)->
+ undefined;
+split_etags(Tags) ->
+ string:tokens(Tags,", ").
+
+%%----------------------------------------------------------------------
+%%Control if the etag for the file is in the list
+%%----------------------------------------------------------------------
+compare_etags(Tag,Etags) ->
+ case lists:member(Tag,Etags) of
+ true ->
+ match;
+ _ ->
+ nomatch
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%Check whether the file has been modified                            %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%----------------------------------------------------------------------
+%%Check the If-Modified-Since and If-Unmodified-Since header fields
+%%----------------------------------------------------------------------
+control_modification(Path,Info,FileInfo)->
+ ?DEBUG("control_modification() -> entry",[]),
+ case control_modification_data(Info,FileInfo#file_info.mtime,"if-modified-since") of
+ modified->
+ continue;
+ unmodified->
+ {304,Info,Path};
+ undefined ->
+ case control_modification_data(Info,FileInfo#file_info.mtime,"if-unmodified-since") of
+ modified ->
+ {412,Info,Path};
+ _ContinueUndefined ->
+ continue
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Check the date from the http-request's if-modified-since and
+%%if-unmodified-since headers against the modification date of the
+%%file
+%%----------------------------------------------------------------------
+%%Info is the record about the request
+%%ModificationTime is the time the file was edited last
+%%Header Field is the name of the field to control
+
+control_modification_data(Info,ModificationTime,HeaderField)->
+ case strip_date(httpd_util:key1search(Info#mod.parsed_header,HeaderField)) of
+ undefined->
+ undefined;
+ LastModified0 ->
+ LastModified=httpd_util:convert_request_date(LastModified0),
+ ?DEBUG("control_modification_data() -> "
+ "~n Request-Field: ~s"
+ "~n FileLastModified: ~p"
+ "~n FieldValue: ~p",
+ [HeaderField,ModificationTime,LastModified]),
+ case LastModified of
+ bad_date ->
+ undefined;
+ _ ->
+ FileTime=calendar:datetime_to_gregorian_seconds(ModificationTime),
+ FieldTime=calendar:datetime_to_gregorian_seconds(LastModified),
+ if
+ FileTime=<FieldTime ->
+ ?DEBUG("File unmodified~n", []),
+ unmodified;
+ FileTime>=FieldTime ->
+ ?DEBUG("File modified~n", []),
+ modified
+ end
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Compare two dates of the form {{YYYY,MM,DD},{HH,MIN,SS}}
+%%If the first date is the bigger one, return bigger1 (read biggerFirst);
+%%if the first date is smaller, return smaller1
+% compare_date(Date,bad_date)->
+% bad_date;
+
+% compare_date({D1,T1},{D2,T2})->
+% case compare_date1(D1,D2) of
+% equal ->
+% compare_date1(T1,T2);
+% GTorLT->
+% GTorLT
+% end.
+
+% compare_date1({T1,T2,T3},{T12,T22,T32}) when T1>T12 ->
+% bigger1;
+% compare_date1({T1,T2,T3},{T1,T22,T32}) when T2>T22 ->
+% bigger1;
+% compare_date1({T1,T2,T3},{T1,T2,T32}) when T3>T32 ->
+% bigger1;
+% compare_date1({T1,T2,T3},{T1,T2,T3})->
+% equal;
+% compare_date1(_D1,_D2)->
+% smaller1.
+
+
+%% IE4 & NS4 send an extra '; length=xxxx' string at the end of the If-Modified-Since
+%% header; we detect this and ignore it (the RFCs do not mention this).
+strip_date(undefined) ->
+ undefined;
+strip_date([]) ->
+ [];
+strip_date([$;,$ |Rest]) ->
+ [];
+strip_date([C|Rest]) ->
+ [C|strip_date(Rest)].
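+%% For illustration:
+%%   strip_date("Wed, 09 Feb 1994 22:23:32 GMT; length=1234")
+%%     -> "Wed, 09 Feb 1994 22:23:32 GMT"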
+
+send_return_value({412,_,_},FileInfo)->
+ {status,{412,none,"Precondition Failed"}};
+
+send_return_value({304,Info,Path},FileInfo)->
+ Suffix=httpd_util:suffix(Path),
+ MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Header = [{code,304},
+ {etag,httpd_util:create_etag(FileInfo)},
+ {content_length,0},
+ {last_modified,httpd_util:rfc1123_date(FileInfo#file_info.mtime)}],
+ {response,{response,Header,nobody}}.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security.erl
new file mode 100644
index 0000000000..14197979d1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security.erl
@@ -0,0 +1,307 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_security.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_security).
+
+%% Security Audit Functionality
+
+%% User API exports
+-export([list_blocked_users/1, list_blocked_users/2, list_blocked_users/3,
+ block_user/4, block_user/5,
+ unblock_user/2, unblock_user/3, unblock_user/4,
+ list_auth_users/1, list_auth_users/2, list_auth_users/3]).
+
+%% module API exports
+-export([do/1, load/2, store/2, remove/1]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"SEC").
+-include("httpd_verbosity.hrl").
+
+
+%% do/1
+do(Info) ->
+ ?vdebug("~n do with ~n Info: ~p",[Info]),
+ %% Check and see if any user has been authorized.
+ case httpd_util:key1search(Info#mod.data,remote_user,not_defined_user) of
+ not_defined_user ->
+ %% No user has been authorized.
+ case httpd_util:key1search(Info#mod.data, status) of
+ %% A status code has been generated!
+ {401, PhraseArgs, Reason} ->
+ case httpd_util:key1search(Info#mod.parsed_header,
+ "authorization") of
+ undefined ->
+ %% Not an authorization attempt (server just replied to
+ %% challenge for authentication)
+ {proceed, Info#mod.data};
+ [$B,$a,$s,$i,$c,$ |EncodedString] ->
+ %% Someone tried to authenticate, and obviously failed!
+ ?vlog("~n Authentication failed: ~s",
+ [EncodedString]),
+ report_failed(Info, EncodedString,"Failed authentication"),
+ take_failed_action(Info, EncodedString),
+ {proceed, Info#mod.data}
+ end;
+ _ ->
+ {proceed, Info#mod.data}
+ end;
+ User ->
+ %% A user has been authenticated, now is he blocked ?
+ ?vtrace("user '~p' authentication",[User]),
+ Path = mod_alias:path(Info#mod.data,
+ Info#mod.config_db,
+ Info#mod.request_uri),
+ {Dir, SDirData} = secretp(Path, Info#mod.config_db),
+ Addr = httpd_util:lookup(Info#mod.config_db, bind_address),
+ Port = httpd_util:lookup(Info#mod.config_db, port),
+ DF = httpd_util:key1search(SDirData, data_file),
+ case mod_security_server:check_blocked_user(Info, User,
+ SDirData,
+ Addr, Port) of
+ true ->
+ ?vtrace("user blocked",[]),
+ report_failed(Info,httpd_util:decode_base64(User) ,"User Blocked"),
+ {proceed, [{status, {403, Info#mod.request_uri, ""}}|Info#mod.data]};
+ false ->
+ ?vtrace("user not blocked",[]),
+ EncodedUser=httpd_util:decode_base64(User),
+		    report_failed(Info, EncodedUser,"Authentication Succeeded"),
+ mod_security_server:store_successful_auth(Addr, Port,
+ User, SDirData),
+ {proceed, Info#mod.data}
+ end
+ end.
+
+
+
+report_failed(Info, EncodedString,Event) ->
+ Request = Info#mod.request_line,
+ Decoded = httpd_util:decode_base64(EncodedString),
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ String = RemoteHost++" : " ++ Event ++ " : "++Request++" : "++Decoded,
+ mod_disk_log:security_log(Info,String),
+ mod_log:security_log(Info, String).
+
+take_failed_action(Info, EncodedString) ->
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db, Info#mod.request_uri),
+ {Dir, SDirData} = secretp(Path, Info#mod.config_db),
+ Addr = httpd_util:lookup(Info#mod.config_db, bind_address),
+ Port = httpd_util:lookup(Info#mod.config_db, port),
+ DecodedString = httpd_util:decode_base64(EncodedString),
+ mod_security_server:store_failed_auth(Info, Addr, Port,
+ DecodedString, SDirData).
+
+secretp(Path, ConfigDB) ->
+ Directories = ets:match(ConfigDB,{directory,'$1','_'}),
+ case secret_path(Path, Directories) of
+ {yes, Directory} ->
+ SDirs0 = httpd_util:multi_lookup(ConfigDB, security_directory),
+ SDir = lists:filter(fun(X) ->
+ lists:member({path, Directory}, X)
+ end, SDirs0),
+ {Directory, lists:flatten(SDir)};
+ no ->
+ error_report({internal_error_secretp, ?MODULE}),
+ {[], []}
+ end.
+
+secret_path(Path,Directories) ->
+ secret_path(Path, httpd_util:uniq(lists:sort(Directories)), to_be_found).
+
+secret_path(Path, [], to_be_found) ->
+ no;
+secret_path(Path, [], Directory) ->
+ {yes, Directory};
+secret_path(Path, [[NewDirectory]|Rest], Directory) ->
+ case regexp:match(Path, NewDirectory) of
+ {match, _, _} when Directory == to_be_found ->
+ secret_path(Path, Rest, NewDirectory);
+ {match, _, Length} when Length > length(Directory)->
+ secret_path(Path, Rest, NewDirectory);
+ {match, _, Length} ->
+ secret_path(Path, Rest, Directory);
+ nomatch ->
+ secret_path(Path, Rest, Directory)
+ end.
+
+
+load([$<,$D,$i,$r,$e,$c,$t,$o,$r,$y,$ |Directory],[]) ->
+ Dir = httpd_conf:custom_clean(Directory,"",">"),
+ {ok, [{security_directory, Dir, [{path, Dir}]}]};
+load(eof,[{security_directory,Directory, DirData}|_]) ->
+ {error, ?NICE("Premature end-of-file in "++Directory)};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$D,$a,$t,$a,$F,$i,$l,$e,$ |FileName],
+ [{security_directory, Dir, DirData}]) ->
+ File = httpd_conf:clean(FileName),
+ {ok, [{security_directory, Dir, [{data_file, File}|DirData]}]};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$C,$a,$l,$l,$b,$a,$c,$k,$M,$o,$d,$u,$l,$e,$ |ModuleName],
+ [{security_directory, Dir, DirData}]) ->
+ Mod = list_to_atom(httpd_conf:clean(ModuleName)),
+ {ok, [{security_directory, Dir, [{callback_module, Mod}|DirData]}]};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$M,$a,$x,$R,$e,$t,$r,$i,$e,$s,$ |Retries],
+ [{security_directory, Dir, DirData}]) ->
+ MaxRetries = httpd_conf:clean(Retries),
+ load_return_int_tag("SecurityMaxRetries", max_retries,
+ httpd_conf:clean(Retries), Dir, DirData);
+load([$S,$e,$c,$u,$r,$i,$t,$y,$B,$l,$o,$c,$k,$T,$i,$m,$e,$ |Time],
+ [{security_directory, Dir, DirData}]) ->
+ load_return_int_tag("SecurityBlockTime", block_time,
+ httpd_conf:clean(Time), Dir, DirData);
+load([$S,$e,$c,$u,$r,$i,$t,$y,$F,$a,$i,$l,$E,$x,$p,$i,$r,$e,$T,$i,$m,$e,$ |Time],
+ [{security_directory, Dir, DirData}]) ->
+ load_return_int_tag("SecurityFailExpireTime", fail_expire_time,
+ httpd_conf:clean(Time), Dir, DirData);
+load([$S,$e,$c,$u,$r,$i,$t,$y,$A,$u,$t,$h,$T,$i,$m,$e,$o,$u,$t,$ |Time0],
+ [{security_directory, Dir, DirData}]) ->
+ Time = httpd_conf:clean(Time0),
+ load_return_int_tag("SecurityAuthTimeout", auth_timeout,
+ httpd_conf:clean(Time), Dir, DirData);
+load([$A,$u,$t,$h,$N,$a,$m,$e,$ |Name0],
+ [{security_directory, Dir, DirData}]) ->
+ Name = httpd_conf:clean(Name0),
+ {ok, [{security_directory, Dir, [{auth_name, Name}|DirData]}]};
+load("</Directory>",[{security_directory,Directory, DirData}]) ->
+ {ok, [], {security_directory, Directory, DirData}}.
+
+load_return_int_tag(Name, Atom, Time, Dir, DirData) ->
+ case Time of
+ "infinity" ->
+ {ok, [{security_directory, Dir, [{Atom, 99999999999999999999999999999}|DirData]}]};
+ Int ->
+ case catch list_to_integer(Time) of
+ {'EXIT', _} ->
+ {error, Time++" is an invalid "++Name};
+ Val ->
+ {ok, [{security_directory, Dir, [{Atom, Val}|DirData]}]}
+ end
+ end.
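+%% For illustration:
+%%   load_return_int_tag("SecurityBlockTime", block_time, "60", Dir, DirData)
+%%     -> {ok,[{security_directory,Dir,[{block_time,60}|DirData]}]}
+%% A Time of "infinity" is mapped to a very large integer instead.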
+
+store({security_directory, Dir0, DirData}, ConfigList) ->
+ ?CDEBUG("store(security_directory) -> ~n"
+ " Dir0: ~p~n"
+ " DirData: ~p",
+ [Dir0, DirData]),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ Port = httpd_util:key1search(ConfigList, port),
+ mod_security_server:start(Addr, Port),
+ SR = httpd_util:key1search(ConfigList, server_root),
+ Dir =
+ case filename:pathtype(Dir0) of
+ relative ->
+ filename:join(SR, Dir0);
+ _ ->
+ Dir0
+ end,
+ case httpd_util:key1search(DirData, data_file, no_data_file) of
+ no_data_file ->
+ {error, no_security_data_file};
+ DataFile0 ->
+ DataFile =
+ case filename:pathtype(DataFile0) of
+ relative ->
+ filename:join(SR, DataFile0);
+ _ ->
+ DataFile0
+ end,
+ case mod_security_server:new_table(Addr, Port, DataFile) of
+ {ok, TwoTables} ->
+ NewDirData0 = lists:keyreplace(data_file, 1, DirData,
+ {data_file, TwoTables}),
+ NewDirData1 = case Addr of
+ undefined ->
+ [{port,Port}|NewDirData0];
+ _ ->
+ [{port,Port},{bind_address,Addr}|
+ NewDirData0]
+ end,
+ {ok, {security_directory,NewDirData1}};
+ {error, Err} ->
+ {error, {{open_data_file, DataFile}, Err}}
+ end
+ end.
+
+
+remove(ConfigDB) ->
+ Addr = case ets:lookup(ConfigDB, bind_address) of
+ [] ->
+ undefined;
+ [{bind_address, Address}] ->
+ Address
+ end,
+ [{port, Port}] = ets:lookup(ConfigDB, port),
+ mod_security_server:delete_tables(Addr, Port),
+ mod_security_server:stop(Addr, Port).
+
+
+%%
+%% User API
+%%
+
+%% list_blocked_users
+
+list_blocked_users(Port) ->
+ list_blocked_users(undefined, Port).
+
+list_blocked_users(Port, Dir) when integer(Port) ->
+ list_blocked_users(undefined,Port,Dir);
+list_blocked_users(Addr, Port) when integer(Port) ->
+ mod_security_server:list_blocked_users(Addr, Port).
+
+list_blocked_users(Addr, Port, Dir) ->
+ mod_security_server:list_blocked_users(Addr, Port, Dir).
+
+
+%% block_user
+
+block_user(User, Port, Dir, Time) ->
+ block_user(User, undefined, Port, Dir, Time).
+block_user(User, Addr, Port, Dir, Time) ->
+ mod_security_server:block_user(User, Addr, Port, Dir, Time).
+
+
+%% unblock_user
+
+unblock_user(User, Port) ->
+ unblock_user(User, undefined, Port).
+
+unblock_user(User, Port, Dir) when integer(Port) ->
+ unblock_user(User, undefined, Port, Dir);
+unblock_user(User, Addr, Port) when integer(Port) ->
+ mod_security_server:unblock_user(User, Addr, Port).
+
+unblock_user(User, Addr, Port, Dir) ->
+ mod_security_server:unblock_user(User, Addr, Port, Dir).
+
+
+%% list_auth_users
+
+list_auth_users(Port) ->
+ list_auth_users(undefined,Port).
+
+list_auth_users(Port, Dir) when integer(Port) ->
+ list_auth_users(undefined, Port, Dir);
+list_auth_users(Addr, Port) when integer(Port) ->
+ mod_security_server:list_auth_users(Addr, Port).
+
+list_auth_users(Addr, Port, Dir) ->
+ mod_security_server:list_auth_users(Addr, Port, Dir).
+
+
+error_report(M) ->
+ error_logger:error_report(M).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security_server.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security_server.erl
new file mode 100644
index 0000000000..7df61df63e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_security_server.erl
@@ -0,0 +1,728 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_security_server.erl,v 1.1 2008/12/17 09:53:36 mikpe Exp $
+%%
+%% Security Audit Functionality
+
+%%
+%% The gen_server code.
+%%
+%% A gen_server is needed in this module to take care of shared access to the
+%% data file used to store failed and successful authentications as well as
+%% user blocks.
+%%
+%% The storage model is a write-through model with both an ets and a dets
+%% table. Writes are done to both the ets and then the dets table, but reads
+%% are only done from the ets table.
+%%
+%% This approach also enables parallelism when using dets by returning the
+%% same dets table identifier when opening several files with the same
+%% physical location.
+%%
+%% NOTE: This could be implemented using a single dets table, as it is
+%% possible to open a dets file with the ram_file flag, but this
+%% would require periodical sync's to disk, and it would be hard
+%% to decide when such an operation should occur.
+%%
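+%% A rough sketch of the write-through pattern used throughout this module
+%% (ETS and DETS denote the per-directory table pair; Object and Pattern are
+%% placeholders):
+%%
+%%   ets:insert(ETS, Object), dets:insert(DETS, Object),  %% writes go to both
+%%   ets:match_object(ETS, Pattern)                        %% reads hit ets only
+%%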
+
+
+-module(mod_security_server).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+
+-behaviour(gen_server).
+
+
+%% User API exports (called via mod_security)
+-export([list_blocked_users/2, list_blocked_users/3,
+ block_user/5,
+ unblock_user/3, unblock_user/4,
+ list_auth_users/2, list_auth_users/3]).
+
+%% Internal exports (for mod_security only)
+-export([start/2, stop/1, stop/2,
+ new_table/3, delete_tables/2,
+ store_failed_auth/5, store_successful_auth/4,
+ check_blocked_user/5]).
+
+%% gen_server exports
+-export([start_link/3,
+ init/1,
+ handle_info/2, handle_call/3, handle_cast/2,
+ terminate/2,
+ code_change/3]).
+
+-export([verbosity/3]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% External API %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% start_link/3
+%%
+%% NOTE: This is called by httpd_misc_sup when the process is started
+%%
+
+start_link(Addr, Port, Verbosity) ->
+ ?vtrace("start_link -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ gen_server:start_link({local, Name}, ?MODULE, [Verbosity],
+ [{timeout, infinity}]).
+
+
+%% start/2
+%% Called by the mod_security module.
+
+start(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ Verbosity = get(security_verbosity),
+ case httpd_misc_sup:start_sec_server(Addr, Port, Verbosity) of
+ {ok, Pid} ->
+ put(security_server, Pid),
+ ok;
+ Error ->
+ exit({failed_start_security_server, Error})
+ end;
+ _ -> %% Already started...
+ ok
+ end.
+
+
+%% stop
+
+stop(Port) ->
+ stop(undefined, Port).
+stop(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ ok;
+ _ ->
+ httpd_misc_sup:stop_sec_server(Addr, Port)
+ end.
+
+
+%% verbosity
+
+verbosity(Addr, Port, Verbosity) ->
+ Name = make_name(Addr, Port),
+ Req = {verbosity, Verbosity},
+ call(Name, Req).
+
+
+%% list_blocked_users
+
+list_blocked_users(Addr, Port) ->
+ Name = make_name(Addr,Port),
+ Req = {list_blocked_users, Addr, Port, '_'},
+ call(Name, Req).
+
+list_blocked_users(Addr, Port, Dir) ->
+ Name = make_name(Addr, Port),
+ Req = {list_blocked_users, Addr, Port, Dir},
+ call(Name, Req).
+
+
+%% block_user
+
+block_user(User, Addr, Port, Dir, Time) ->
+ Name = make_name(Addr, Port),
+ Req = {block_user, User, Addr, Port, Dir, Time},
+ call(Name, Req).
+
+
+%% unblock_user
+
+unblock_user(User, Addr, Port) ->
+ Name = make_name(Addr, Port),
+ Req = {unblock_user, User, Addr, Port, '_'},
+ call(Name, Req).
+
+unblock_user(User, Addr, Port, Dir) ->
+ Name = make_name(Addr, Port),
+ Req = {unblock_user, User, Addr, Port, Dir},
+ call(Name, Req).
+
+
+%% list_auth_users
+
+list_auth_users(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ Req = {list_auth_users, Addr, Port, '_'},
+ call(Name, Req).
+
+list_auth_users(Addr, Port, Dir) ->
+ Name = make_name(Addr,Port),
+ Req = {list_auth_users, Addr, Port, Dir},
+ call(Name, Req).
+
+
+%% new_table
+
+new_table(Addr, Port, TabName) ->
+ Name = make_name(Addr,Port),
+ Req = {new_table, Addr, Port, TabName},
+ call(Name, Req).
+
+
+%% delete_tables
+
+delete_tables(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ ok;
+ _ ->
+ call(Name, delete_tables)
+ end.
+
+
+%% store_failed_auth
+
+store_failed_auth(Info, Addr, Port, DecodedString, SDirData) ->
+ Name = make_name(Addr,Port),
+ Msg = {store_failed_auth,[Info,DecodedString,SDirData]},
+ cast(Name, Msg).
+
+
+%% store_successful_auth
+
+store_successful_auth(Addr, Port, User, SDirData) ->
+ Name = make_name(Addr,Port),
+ Msg = {store_successful_auth, [User,Addr,Port,SDirData]},
+ cast(Name, Msg).
+
+
+%% check_blocked_user
+
+check_blocked_user(Info, User, SDirData, Addr, Port) ->
+ Name = make_name(Addr, Port),
+ Req = {check_blocked_user, [Info, User, SDirData]},
+ call(Name, Req).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Server call-back functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% init
+
+init([undefined]) ->
+ init([?default_verbosity]);
+init([Verbosity]) ->
+ ?DEBUG("init -> entry with Verbosity: ~p",[Verbosity]),
+ process_flag(trap_exit, true),
+ put(sname, sec),
+ put(verbosity, Verbosity),
+ ?vlog("starting",[]),
+ {ok, []}.
+
+
+%% handle_call
+
+handle_call(stop, _From, Tables) ->
+ ?vlog("stop",[]),
+ {stop, normal, ok, []};
+
+
+handle_call({verbosity,Verbosity}, _From, Tables) ->
+ ?vlog("set verbosity to ~p",[Verbosity]),
+ OldVerbosity = get(verbosity),
+ put(verbosity,Verbosity),
+ ?vdebug("old verbosity: ~p",[OldVerbosity]),
+ {reply,OldVerbosity,Tables};
+
+
+handle_call({block_user, User, Addr, Port, Dir, Time}, _From, Tables) ->
+ ?vlog("block user '~p' for ~p",[User,Dir]),
+ Ret = block_user_int({User, Addr, Port, Dir, Time}),
+ ?vdebug("block user result: ~p",[Ret]),
+ {reply, Ret, Tables};
+
+
+handle_call({list_blocked_users, Addr, Port, Dir}, _From, Tables) ->
+ ?vlog("list blocked users for ~p",[Dir]),
+ Blocked = list_blocked(Tables, Addr, Port, Dir, []),
+ ?vdebug("list blocked users: ~p",[Blocked]),
+ {reply, Blocked, Tables};
+
+
+handle_call({unblock_user, User, Addr, Port, Dir}, _From, Tables) ->
+ ?vlog("unblock user '~p' for ~p",[User,Dir]),
+ Ret = unblock_user_int({User, Addr, Port, Dir}),
+ ?vdebug("unblock user result: ~p",[Ret]),
+ {reply, Ret, Tables};
+
+
+handle_call({list_auth_users, Addr, Port, Dir}, _From, Tables) ->
+ ?vlog("list auth users for ~p",[Dir]),
+ Auth = list_auth(Tables, Addr, Port, Dir, []),
+ ?vdebug("list auth users result: ~p",[Auth]),
+ {reply, Auth, Tables};
+
+
+handle_call({new_table, Addr, Port, Name}, _From, Tables) ->
+ case lists:keysearch(Name, 1, Tables) of
+ {value, {Name, {Ets, Dets}}} ->
+ ?DEBUG("handle_call(new_table) -> we already have this table: ~p",
+ [Name]),
+ ?vdebug("new table; we already have this one: ~p",[Name]),
+ {reply, {ok, {Ets, Dets}}, Tables};
+ false ->
+ ?LOG("handle_call(new_table) -> new_table: Name = ~p",[Name]),
+ ?vlog("new table: ~p",[Name]),
+ TName = make_name(Addr,Port,length(Tables)),
+ ?DEBUG("handle_call(new_table) -> TName: ~p",[TName]),
+ ?vdebug("new table: ~p",[TName]),
+ case dets:open_file(TName, [{type, bag}, {file, Name},
+ {repair, true},
+ {access, read_write}]) of
+ {ok, DFile} ->
+ ETS = ets:new(TName, [bag, private]),
+ sync_dets_to_ets(DFile, ETS),
+ NewTables = [{Name, {ETS, DFile}}|Tables],
+ ?DEBUG("handle_call(new_table) -> ~n"
+ " NewTables: ~p",[NewTables]),
+ ?vtrace("new tables: ~p",[NewTables]),
+ {reply, {ok, {ETS, DFile}}, NewTables};
+ {error, Err} ->
+ ?LOG("handle_call -> Err: ~p",[Err]),
+ ?vinfo("failed open dets file: ~p",[Err]),
+ {reply, {error, {create_dets, Err}}, Tables}
+ end
+ end;
+
+handle_call(delete_tables, _From, Tables) ->
+ ?vlog("delete tables",[]),
+ lists:foreach(fun({Name, {ETS, DETS}}) ->
+ dets:close(DETS),
+ ets:delete(ETS)
+ end, Tables),
+ {reply, ok, []};
+
+handle_call({check_blocked_user, [Info, User, SDirData]}, _From, Tables) ->
+ ?vlog("check blocked user '~p'",[User]),
+ {ETS, DETS} = httpd_util:key1search(SDirData, data_file),
+ Dir = httpd_util:key1search(SDirData, path),
+ Addr = httpd_util:key1search(SDirData, bind_address),
+ Port = httpd_util:key1search(SDirData, port),
+ CBModule = httpd_util:key1search(SDirData, callback_module, no_module_at_all),
+ ?vdebug("call back module: ~p",[CBModule]),
+ Ret = check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule),
+ ?vdebug("check result: ~p",[Ret]),
+ {reply, Ret, Tables};
+handle_call(Request,From,Tables) ->
+ ?vinfo("~n unknown call '~p' from ~p",[Request,From]),
+ {reply,ok,Tables}.
+
+
+%% handle_cast
+
+handle_cast({store_failed_auth, [Info, DecodedString, SDirData]}, Tables) ->
+ ?vlog("store failed auth",[]),
+ {ETS, DETS} = httpd_util:key1search(SDirData, data_file),
+ Dir = httpd_util:key1search(SDirData, path),
+ Addr = httpd_util:key1search(SDirData, bind_address),
+ Port = httpd_util:key1search(SDirData, port),
+ {ok, [User,Password]} = httpd_util:split(DecodedString,":",2),
+ ?vdebug("user '~p' and password '~p'",[User,Password]),
+ Seconds = universal_time(),
+ Key = {User, Dir, Addr, Port},
+
+ %% Event
+ CBModule = httpd_util:key1search(SDirData, callback_module, no_module_at_all),
+ ?vtrace("call back module: ~p",[CBModule]),
+ auth_fail_event(CBModule,Addr,Port,Dir,User,Password),
+
+ %% Find out if any of this user's other failed logins are too old to keep..
+ ?vtrace("remove old login failures",[]),
+ case ets:match_object(ETS, {failed, {Key, '_', '_'}}) of
+ [] ->
+ ?vtrace("no old login failures",[]),
+ no;
+ List when list(List) ->
+ ?vtrace("~p old login failures",[length(List)]),
+ ExpireTime = httpd_util:key1search(SDirData, fail_expire_time, 30)*60,
+ ?vtrace("expire time ~p",[ExpireTime]),
+ lists:map(fun({failed, {TheKey, LS, Gen}}) ->
+ Diff = Seconds-LS,
+ if
+ Diff > ExpireTime ->
+ ?vtrace("~n '~p' is to old to keep: ~p",
+ [TheKey,Gen]),
+ ets:match_delete(ETS, {failed, {TheKey, LS, Gen}}),
+ dets:match_delete(DETS, {failed, {TheKey, LS, Gen}});
+ true ->
+ ?vtrace("~n '~p' is not old enough: ~p",
+ [TheKey,Gen]),
+ ok
+ end
+ end,
+ List);
+ O ->
+	    ?vlog("~n   unknown login failure search result: ~p",[O]),
+ no
+ end,
+
+ %% Insert the new failure..
+ Generation = length(ets:match_object(ETS, {failed, {Key, '_', '_'}})),
+ ?vtrace("insert ('~p') new login failure: ~p",[Key,Generation]),
+ ets:insert(ETS, {failed, {Key, Seconds, Generation}}),
+ dets:insert(DETS, {failed, {Key, Seconds, Generation}}),
+
+ %% See if we should block this user..
+ MaxRetries = httpd_util:key1search(SDirData, max_retries, 3),
+ BlockTime = httpd_util:key1search(SDirData, block_time, 60),
+ ?vtrace("~n Max retries ~p, block time ~p",[MaxRetries,BlockTime]),
+ case ets:match_object(ETS, {failed, {Key, '_', '_'}}) of
+ List1 ->
+ ?vtrace("~n ~p tries so far",[length(List1)]),
+ if
+ length(List1) >= MaxRetries ->
+ %% Block this user until Future
+ ?vtrace("block user '~p'",[User]),
+ Future = Seconds+BlockTime*60,
+ ?vtrace("future: ~p",[Future]),
+ Reason = io_lib:format("Blocking user ~s from dir ~s "
+ "for ~p minutes",
+ [User, Dir, BlockTime]),
+ mod_log:security_log(Info, lists:flatten(Reason)),
+
+ %% Event
+ user_block_event(CBModule,Addr,Port,Dir,User),
+
+ ets:match_delete(ETS,{blocked_user,
+ {User, Addr, Port, Dir, '$1'}}),
+ dets:match_delete(DETS, {blocked_user,
+ {User, Addr, Port, Dir, '$1'}}),
+ BlockRecord = {blocked_user,
+ {User, Addr, Port, Dir, Future}},
+ ets:insert(ETS, BlockRecord),
+ dets:insert(DETS, BlockRecord),
+ %% Remove previous failed requests.
+ ets:match_delete(ETS, {failed, {Key, '_', '_'}}),
+ dets:match_delete(DETS, {failed, {Key, '_', '_'}});
+ true ->
+ ?vtrace("still some tries to go",[]),
+ no
+ end;
+ Other ->
+ no
+ end,
+ {noreply, Tables};
+
+handle_cast({store_successful_auth, [User, Addr, Port, SDirData]}, Tables) ->
+    ?vlog("store successful auth",[]),
+ {ETS, DETS} = httpd_util:key1search(SDirData, data_file),
+ AuthTimeOut = httpd_util:key1search(SDirData, auth_timeout, 30),
+ Dir = httpd_util:key1search(SDirData, path),
+ Key = {User, Dir, Addr, Port},
+
+ %% Remove failed entries for this Key
+ dets:match_delete(DETS, {failed, {Key, '_', '_'}}),
+ ets:match_delete(ETS, {failed, {Key, '_', '_'}}),
+
+ %% Keep track of when the last successful login took place.
+ Seconds = universal_time()+AuthTimeOut,
+ ets:match_delete(ETS, {success, {Key, '_'}}),
+ dets:match_delete(DETS, {success, {Key, '_'}}),
+ ets:insert(ETS, {success, {Key, Seconds}}),
+ dets:insert(DETS, {success, {Key, Seconds}}),
+ {noreply, Tables};
+
+handle_cast(Req, Tables) ->
+ ?vinfo("~n unknown cast '~p'",[Req]),
+ error_msg("security server got unknown cast: ~p",[Req]),
+ {noreply, Tables}.
+
+
+%% handle_info
+
+handle_info(Info, State) ->
+ ?vinfo("~n unknown info '~p'",[Info]),
+ {noreply, State}.
+
+
+%% terminate
+
+terminate(Reason, _Tables) ->
+ ?vlog("~n Terminating for reason: ~p",[Reason]),
+ ok.
+
+
+%% code_change({down, ToVsn}, State, Extra)
+%%
+code_change({down, _}, State, _Extra) ->
+ ?vlog("downgrade", []),
+ {ok, State};
+
+
+%% code_change(FromVsn, State, Extra)
+%%
+code_change(_, State, Extra) ->
+ ?vlog("upgrade", []),
+ {ok, State}.
+
+
+
+
+%% block_user_int/2
+block_user_int({User, Addr, Port, Dir, Time}) ->
+ Dirs = httpd_manager:config_match(Addr, Port, {security_directory, '_'}),
+ ?vtrace("block '~p' for ~p during ~p",[User,Dir,Time]),
+ case find_dirdata(Dirs, Dir) of
+ {ok, DirData, {ETS, DETS}} ->
+ Time1 =
+ case Time of
+ infinity ->
+ 99999999999999999999999999999;
+ _ ->
+ Time
+ end,
+ Future = universal_time()+Time1,
+ ets:match_delete(ETS, {blocked_user, {User,Addr,Port,Dir,'_'}}),
+ dets:match_delete(DETS, {blocked_user, {User,Addr,Port,Dir,'_'}}),
+ ets:insert(ETS, {blocked_user, {User,Addr,Port,Dir,Future}}),
+ dets:insert(DETS, {blocked_user, {User,Addr,Port,Dir,Future}}),
+ CBModule = httpd_util:key1search(DirData, callback_module,
+ no_module_at_all),
+ ?vtrace("call back module ~p",[CBModule]),
+ user_block_event(CBModule,Addr,Port,Dir,User),
+ true;
+ _ ->
+ {error, no_such_directory}
+ end.
+
+
+find_dirdata([], _Dir) ->
+ false;
+find_dirdata([{security_directory, DirData}|SDirs], Dir) ->
+ case lists:keysearch(path, 1, DirData) of
+ {value, {path, Dir}} ->
+ {value, {data_file, {ETS, DETS}}} =
+ lists:keysearch(data_file, 1, DirData),
+ {ok, DirData, {ETS, DETS}};
+ _ ->
+ find_dirdata(SDirs, Dir)
+ end.
+
+%% unblock_user_int/2
+
+unblock_user_int({User, Addr, Port, Dir}) ->
+ ?vtrace("unblock user '~p' for ~p",[User,Dir]),
+ Dirs = httpd_manager:config_match(Addr, Port, {security_directory, '_'}),
+ ?vtrace("~n dirs: ~p",[Dirs]),
+ case find_dirdata(Dirs, Dir) of
+ {ok, DirData, {ETS, DETS}} ->
+ case ets:match_object(ETS,{blocked_user,{User,Addr,Port,Dir,'_'}}) of
+ [] ->
+ ?vtrace("not blocked",[]),
+ {error, not_blocked};
+ Objects ->
+ ets:match_delete(ETS, {blocked_user,
+ {User, Addr, Port, Dir, '_'}}),
+ dets:match_delete(DETS, {blocked_user,
+ {User, Addr, Port, Dir, '_'}}),
+ CBModule = httpd_util:key1search(DirData, callback_module,
+ no_module_at_all),
+ user_unblock_event(CBModule,Addr,Port,Dir,User),
+ true
+ end;
+ _ ->
+ ?vlog("~n cannot unblock: no such directory '~p'",[Dir]),
+ {error, no_such_directory}
+ end.
+
+
+
+%% list_auth/2
+
+list_auth([], _Addr, _Port, Dir, Acc) ->
+ Acc;
+list_auth([{Name, {ETS, DETS}}|Tables], Addr, Port, Dir, Acc) ->
+ case ets:match_object(ETS, {success, {{'_', Dir, Addr, Port}, '_'}}) of
+ [] ->
+ list_auth(Tables, Addr, Port, Dir, Acc);
+ List when list(List) ->
+ TN = universal_time(),
+ NewAcc = lists:foldr(fun({success,{{U,Ad,P,D},T}},Ac) ->
+ if
+ T-TN > 0 ->
+ [U|Ac];
+ true ->
+ Rec = {success,{{U,Ad,P,D},T}},
+ ets:match_delete(ETS,Rec),
+ dets:match_delete(DETS,Rec),
+ Ac
+ end
+ end,
+ Acc, List),
+ list_auth(Tables, Addr, Port, Dir, NewAcc);
+ _ ->
+ list_auth(Tables, Addr, Port, Dir, Acc)
+ end.
+
+
+%% list_blocked/2
+
+list_blocked([], Addr, Port, Dir, Acc) ->
+ TN = universal_time(),
+ lists:foldl(fun({U,Ad,P,D,T}, Ac) ->
+ if
+ T-TN > 0 ->
+ [{U,Ad,P,D,local_time(T)}|Ac];
+ true ->
+ Ac
+ end
+ end,
+ [], Acc);
+list_blocked([{Name, {ETS, DETS}}|Tables], Addr, Port, Dir, Acc) ->
+ NewBlocked =
+ case ets:match_object(ETS, {blocked_user, {'_',Addr,Port,Dir,'_'}}) of
+ List when list(List) ->
+ lists:foldl(fun({blocked_user, X}, A) -> [X|A] end, Acc, List);
+ _ ->
+ Acc
+ end,
+ list_blocked(Tables, Addr, Port, Dir, NewBlocked).
+
+
+%%
+%% sync_dets_to_ets/2
+%%
+%% Reads the dets table DETS and synchronizes it with the ets table ETS.
+%%
+sync_dets_to_ets(DETS, ETS) ->
+ dets:traverse(DETS, fun(X) ->
+ ets:insert(ETS, X),
+ continue
+ end).
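+
+%% A minimal usage sketch (hypothetical table names, not part of the module):
+%%   {ok, D} = dets:open_file(sec_dets, [{file, "sec.dets"}]),
+%%   E = ets:new(sec_ets, [set, public]),
+%%   sync_dets_to_ets(D, E)  %% every object stored in D is copied into E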
+
+%%
+%% check_blocked_user/8 -> true | false
+%%
+%% Check if a specific user is blocked from access.
+%%
+%% As a side effect, this routine also unblocks other users whose
+%% blocking time has expired, in order to keep the tables as small
+%% as possible.
+%%
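+%% Illustrative example (hypothetical values): given an ets entry
+%%   {blocked_user, {"joe", {127,0,0,1}, 80, "/secret", T}}
+%% with T still in the future, the call returns true; once T has passed,
+%% the entry is removed via unblock_user/8 and false is returned.
+%%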
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule) ->
+ TN = universal_time(),
+ case ets:match_object(ETS, {blocked_user, {User, '_', '_', '_', '_'}}) of
+ List when list(List) ->
+ Blocked = lists:foldl(fun({blocked_user, X}, A) ->
+ [X|A] end, [], List),
+ check_blocked_user(Info,User,Dir,Addr,Port,ETS,DETS,TN,Blocked,CBModule);
+ _ ->
+ false
+ end.
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN, [], CBModule) ->
+ false;
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN,
+ [{User,Addr,Port,Dir,T}|Ls], CBModule) ->
+ TD = T-TN,
+ if
+ TD =< 0 ->
+ %% Blocking has expired, remove and grant access.
+ unblock_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule),
+ false;
+ true ->
+ true
+ end;
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN,
+ [{OUser,ODir,OAddr,OPort,T}|Ls], CBModule) ->
+ TD = T-TN,
+ if
+ TD =< 0 ->
+ %% Blocking has expired, remove.
+ unblock_user(Info, OUser, ODir, OAddr, OPort, ETS, DETS, CBModule);
+ true ->
+ true
+ end,
+ check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN, Ls, CBModule).
+
+unblock_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule) ->
+ Reason=io_lib:format("User ~s was removed from the block list for dir ~s",
+ [User, Dir]),
+ mod_log:security_log(Info, lists:flatten(Reason)),
+ user_unblock_event(CBModule,Addr,Port,Dir,User),
+ dets:match_delete(DETS, {blocked_user, {User, Addr, Port, Dir, '_'}}),
+ ets:match_delete(ETS, {blocked_user, {User, Addr, Port, Dir, '_'}}).
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_security",Addr,Port).
+
+make_name(Addr,Port,Num) ->
+ httpd_util:make_name("httpd_security",Addr,Port,
+ "__" ++ integer_to_list(Num)).
+
+
+auth_fail_event(Mod,Addr,Port,Dir,User,Passwd) ->
+ event(auth_fail,Mod,Addr,Port,Dir,[{user,User},{password,Passwd}]).
+
+user_block_event(Mod,Addr,Port,Dir,User) ->
+ event(user_block,Mod,Addr,Port,Dir,[{user,User}]).
+
+user_unblock_event(Mod,Addr,Port,Dir,User) ->
+ event(user_unblock,Mod,Addr,Port,Dir,[{user,User}]).
+
+event(Event,Mod,undefined,Port,Dir,Info) ->
+ (catch Mod:event(Event,Port,Dir,Info));
+event(Event,Mod,Addr,Port,Dir,Info) ->
+ (catch Mod:event(Event,Addr,Port,Dir,Info)).
+
+universal_time() ->
+ calendar:datetime_to_gregorian_seconds(calendar:universal_time()).
+
+local_time(T) ->
+ calendar:universal_time_to_local_time(
+ calendar:gregorian_seconds_to_datetime(T)).
+
+
+error_msg(F, A) ->
+ error_logger:error_msg(F, A).
+
+
+call(Name, Req) ->
+ case (catch gen_server:call(Name, Req)) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Reply ->
+ Reply
+ end.
+
+
+cast(Name, Msg) ->
+ case (catch gen_server:cast(Name, Msg)) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Result ->
+ Result
+ end.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_trace.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_trace.erl
new file mode 100644
index 0000000000..51fe6d283a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/mod_trace.erl
@@ -0,0 +1,69 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_trace.erl,v 1.1 2008/12/17 09:53:36 mikpe Exp $
+%%
+-module(mod_trace).
+
+-export([do/1]).
+
+-include("httpd.hrl").
+
+
+do(Info) ->
+ %%?vtrace("do",[]),
+ case Info#mod.method of
+ "TRACE" ->
+ case httpd_util:response_generated(Info) of
+ false->
+ generate_trace_response(Info);
+ true->
+ {proceed,Info#mod.data}
+ end;
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+
+%%---------------------------------------------------------------------
+%% Generate the trace response. The trace response consists of an
+%% http header, and the body is the original request.
+%%---------------------------------------------------------------------
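+
+%% Illustrative sketch (not part of the original module): for a request line
+%% "TRACE /index.html HTTP/1.1" the delivered response is roughly
+%%   "HTTP/1.1 200 OK\r\nContent-Type:message/http\r\nContent-Length:<N>\r\n\r\n"
+%% followed by the request line and the echoed request headers, one
+%% "Field:Value" pair per line.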
+
+generate_trace_response(Info)->
+ RequestHead=Info#mod.parsed_header,
+ Body=generate_trace_response_body(RequestHead),
+ Len=length(Body),
+ Response=["HTTP/1.1 200 OK\r\n",
+ "Content-Type:message/http\r\n",
+ "Content-Length:",integer_to_list(Len),"\r\n\r\n",
+ Info#mod.request_line,Body],
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,Response),
+ {proceed,[{response,{already_sent,200,Len}}|Info#mod.data]}.
+
+generate_trace_response_body(Parsed_header)->
+ generate_trace_response_body(Parsed_header,[]).
+
+generate_trace_response_body([],Head)->
+ lists:flatten(Head);
+generate_trace_response_body([{[],[]}|Rest],Head) ->
+ generate_trace_response_body(Rest,Head);
+generate_trace_response_body([{Field,Value}|Rest],Head) ->
+ generate_trace_response_body(Rest,[Field ++ ":" ++ Value ++ "\r\n"|Head]).
+
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/uri.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/uri.erl
new file mode 100644
index 0000000000..e1acd62a31
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/inets/uri.erl
@@ -0,0 +1,349 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+%% Author : Johan Blom <[email protected]>
+%% Description :
+%% Implements various scheme dependent subsets (e.g. HTTP, FTP etc) based on
+%% RFC 2396, Uniform Resource Identifiers (URI): Generic Syntax
+%% Created : 27 Jul 2001 by Johan Blom <[email protected]>
+%%
+
+-module(uri).
+
+-author('[email protected]').
+
+-export([parse/1,resolve/2]).
+
+
+%%% Parse URI and return {Scheme,Path}
+%%% Note that Scheme specific parsing/validation is not handled here!
+resolve(Root,Rel) ->
+ ok.
+
+%%% See "http://www.isi.edu/in-notes/iana/assignments/url-schemes" for a list of
+%%% defined URL schemes and references to its sources.
+
+parse(URI) ->
+ case parse_scheme(URI) of
+ {http,Cont} -> parse_http(Cont,http);
+ {https,Cont} -> parse_http(Cont,https);
+ {ftp,Cont} -> parse_ftp(Cont,ftp);
+ {sip,Cont} -> parse_sip(Cont,sip);
+ {sms,Cont} -> parse_sms(Cont,sms);
+ {error,Error} -> {error,Error};
+ {Scheme,Cont} -> {Scheme,Cont}
+ end.
+
+
+%%% Parse the scheme.
+parse_scheme(URI) ->
+ parse_scheme(URI,[]).
+
+parse_scheme([H|URI],Acc) when $a=<H,H=<$z; $A=<H,H=<$Z ->
+ parse_scheme2(URI,[H|Acc]);
+parse_scheme(_,_) ->
+ {error,no_scheme}.
+
+parse_scheme2([H|URI],Acc)
+ when $a=<H,H=<$z; $A=<H,H=<$Z; $0=<H,H=<$9; H==$-;H==$+;H==$. ->
+ parse_scheme2(URI,[H|Acc]);
+parse_scheme2([$:|URI],Acc) ->
+ {list_to_atom(lists:reverse(Acc)),URI};
+parse_scheme2(_,_) ->
+ {error,no_scheme}.
+
+
+%%% ............................................................................
+-define(HTTP_DEFAULT_PORT, 80).
+-define(HTTPS_DEFAULT_PORT, 443).
+
+%%% HTTP (Source RFC 2396, RFC 2616)
+%%% http_URL = "*" | absoluteURI | abs_path [ "?" query ] | authority
+
+%%% http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query ]]
+%%% Returns a tuple {http,Host,Port,PathQuery} where
+%%% Host = string() Host value
+%%% Port = string() Port value
+%%% PathQuery= string() Combined absolute path and query value
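+%%%
+%%% A hedged usage sketch (illustrative, not part of the original source):
+%%%   1> uri:parse("http://www.erlang.org/doc?q=1").
+%%%   {http,"www.erlang.org",80,"/doc?q=1"}
+%%%   2> uri:parse("http://www.erlang.org:8080/doc").
+%%%   {http,"www.erlang.org",8080,"/doc"}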
+parse_http("//"++C0,Scheme) ->
+ case scan_hostport(C0,Scheme) of
+ {C1,Host,Port} ->
+ case scan_pathquery(C1) of
+ {error,Error} ->
+ {error,Error};
+ PathQuery ->
+ {Scheme,Host,Port,PathQuery}
+ end;
+ {error,Error} ->
+ {error,Error}
+ end;
+parse_http(_,_) ->
+ {error,invalid_url}.
+
+scan_pathquery(C0) ->
+ case scan_abspath(C0) of
+ {error,Error} ->
+ {error,Error};
+ {[],[]} -> % Add implicit path
+ "/";
+ {"?"++C1,Path} ->
+ case scan_query(C1,[]) of
+ {error,Error} ->
+ {error,Error};
+ Query ->
+ Path++"?"++Query
+ end;
+ {[],Path} ->
+ Path
+ end.
+
+
+%%% ............................................................................
+%%% FIXME!!! This is just a quick hack that doesn't work!
+-define(FTP_DEFAULT_PORT, 80).
+
+%%% FTP (Source RFC 2396, RFC 1738, RFC 959)
+%%% Note: This BNF has been modified to better fit with RFC 2396
+%%% ftp_URL = "ftp:" "//" [ ftp_userinfo ] host [ ":" port ] ftp_abs_path
+%%% ftp_userinfo = ftp_user [ ":" ftp_password ]
+%%% ftp_abs_path = "/" ftp_path_segments [ ";type=" ftp_type ]
+%%% ftp_path_segments = ftp_segment *( "/" ftp_segment)
+%%% ftp_segment = *[ ftp_uchar | "?" | ":" | "@" | "&" | "=" ]
+%%% ftp_type = "A" | "I" | "D" | "a" | "i" | "d"
+%%% ftp_user = *[ ftp_uchar | ";" | "?" | "&" | "=" ]
+%%% ftp_password = *[ ftp_uchar | ";" | "?" | "&" | "=" ]
+%%% ftp_uchar = ftp_unreserved | escaped
+%%% ftp_unreserved = alphanum | mark | "$" | "+" | ","
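+%%%
+%%% Illustrative only: given the FIXME above, parse/1 currently yields e.g.
+%%%   uri:parse("ftp://ftp.erlang.org/pub") ->
+%%%       {ftp,{[],[]},"ftp.erlang.org",80,"/pub"}
+%%% i.e. empty credentials and the (wrong) default port 80.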
+parse_ftp("//"++C0,Scheme) ->
+ case ftp_userinfo(C0) of
+ {C1,Creds} ->
+ case scan_hostport(C1,Scheme) of
+ {C2,Host,Port} ->
+ case scan_abspath(C2) of
+ {error,Error} ->
+ {error,Error};
+ {[],[]} -> % Add implicit path
+ {Scheme,Creds,Host,Port,"/"};
+ {[],Path} ->
+ {Scheme,Creds,Host,Port,Path}
+ end;
+ {error,Error} ->
+ {error,Error}
+ end;
+ {error,Error} ->
+ {error,Error}
+ end.
+
+ftp_userinfo(C0) ->
+ User="",
+ Password="",
+ {C0,{User,Password}}.
+
+
+%%% ............................................................................
+%%% SIP (Source RFC 2396, RFC 2543)
+%%% sip_URL = "sip:" [ sip_userinfo "@" ] host [ ":" port ]
+%%% sip_url-parameters [ sip_headers ]
+%%% sip_userinfo = sip_user [ ":" sip_password ]
+%%% sip_user = *( unreserved | escaped | "&" | "=" | "+" | "$" | "," )
+%%% sip_password = *( unreserved | escaped | "&" | "=" | "+" | "$" | "," )
+%%% sip_url-parameters = *( ";" sip_url-parameter )
+%%% sip_url-parameter = sip_transport-param | sip_user-param |
+%%% sip_method-param | sip_ttl-param |
+%%% sip_maddr-param | sip_other-param
+%%% sip_transport-param = "transport=" ( "udp" | "tcp" )
+%%% sip_ttl-param = "ttl=" sip_ttl
+%%% sip_ttl = 1*3DIGIT ; 0 to 255
+%%% sip_maddr-param = "maddr=" host
+%%% sip_user-param = "user=" ( "phone" | "ip" )
+%%% sip_method-param = "method=" sip_Method
+%%% sip_tag-param = "tag=" sip_UUID
+%%% sip_UUID = 1*( hex | "-" )
+%%% sip_other-param = ( token | ( token "=" ( token | quoted-string )))
+%%% sip_Method = "INVITE" | "ACK" | "OPTIONS" | "BYE" |
+%%% "CANCEL" | "REGISTER"
+%%% sip_token = 1*< any CHAR except CTL's or separators>
+%%% sip_quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
+%%% sip_qdtext = <any TEXT-UTF8 except <">>
+%%% sip_quoted-pair = " \ " CHAR
+parse_sip(Cont,Scheme) ->
+ {Scheme,Cont}.
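+
+%%% Scheme-specific SIP parsing is left as a stub above; illustratively,
+%%%   uri:parse("sip:alice@atlanta.example") -> {sip,"alice@atlanta.example"}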
+
+
+
+
+%%% ............................................................................
+%%% SMS (Source draft-wilde-sms-uri-01, January 24 2002 and
+%%% draft-allocchio-gstn-01, November 2001)
+%%% The syntax definition for "gstn-phone" is taken from
+%%% [draft-allocchio-gstn-01], allowing global as well as local telephone
+%%% numbers.
+%%% Note: This BNF has been modified to better fit with RFC 2396
+%%% sms_URI = sms ":" 1*( sms-recipient ) [ sms-body ]
+%%% sms-recipient = gstn-phone sms-qualifier
+%%% [ "," sms-recipient ]
+%%% sms-qualifier = *( smsc-qualifier / pid-qualifier )
+%%% smsc-qualifier = ";smsc=" SMSC-sub-addr
+%%% pid-qualifier = ";pid=" PID-sub-addr
+%%% sms-body = ";body=" *urlc
+%%% gstn-phone = ( global-phone / local-phone )
+%%% global-phone = "+" 1*( DIGIT / written-sep )
+%%% local-phone = [ exit-code ] dial-number / exit-code [ dial-number ]
+%%% exit-code = phone-string
+%%% dial-number = phone-string
+%%% subaddr-string = phone-string
+%%% post-dial = phone-string
+%%% phone-string = 1*( DTMF / pause / tonewait / written-sep )
+%%% DTMF = ( DIGIT / "#" / "*" / "A" / "B" / "C" / "D" )
+%%% written-sep = ( "-" / "." )
+%%% pause = "p"
+%%% tonewait = "w"
+parse_sms(Cont,Scheme) ->
+ {Scheme,Cont}.
+
+
+%%% ============================================================================
+%%% Generic URI parsing. BNF rules from RFC 2396
+
+%%% hostport = host [ ":" port ]
+scan_hostport(C0,Scheme) ->
+ case scan_host(C0) of
+ {error,Error} ->
+ {error,Error};
+ {":"++C1,Host} ->
+ {C2,Port}=scan_port(C1,[]),
+ {C2,Host,list_to_integer(Port)};
+ {C1,Host} when Scheme==http ->
+ {C1,Host,?HTTP_DEFAULT_PORT};
+ {C1,Host} when Scheme==https ->
+ {C1,Host,?HTTPS_DEFAULT_PORT};
+ {C1,Host} when Scheme==ftp ->
+ {C1,Host,?FTP_DEFAULT_PORT}
+ end.
+
+
+%%% host = hostname | IPv4address
+%%% hostname = *( domainlabel "." ) toplabel [ "." ]
+%%% domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
+%%% toplabel = alpha | alpha *( alphanum | "-" ) alphanum
+%%% IPv4address = 1*digit "." 1*digit "." 1*digit "." 1*digit
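+%%%
+%%% For illustration (not in the original), scan_host/1 as written returns
+%%%   scan_host("www.erlang.org:80/doc") -> {":80/doc","www.erlang.org"}
+%%%   scan_host("127.0.0.1")             -> {[],"127.0.0.1"}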
+
+-define(ALPHA, 1).
+-define(DIGIT, 2).
+
+scan_host(C0) ->
+ case scan_host2(C0,[],0,[],[]) of
+ {C1,IPv4address,[?DIGIT,?DIGIT,?DIGIT,?DIGIT]} ->
+ {C1,lists:reverse(lists:append(IPv4address))};
+ {C1,Hostname,[?ALPHA|HostF]} ->
+ {C1,lists:reverse(lists:append(Hostname))};
+ _ ->
+ {error,no_host}
+ end.
+
+scan_host2([H|C0],Acc,CurF,Host,HostF) when $0=<H,H=<$9 ->
+ scan_host2(C0,[H|Acc],CurF bor ?DIGIT,Host,HostF);
+scan_host2([H|C0],Acc,CurF,Host,HostF) when $a=<H,H=<$z; $A=<H,H=<$Z ->
+ scan_host2(C0,[H|Acc],CurF bor ?ALPHA,Host,HostF);
+scan_host2([$-|C0],Acc,CurF,Host,HostF) when CurF=/=0 ->
+ scan_host2(C0,[$-|Acc],CurF,Host,HostF);
+scan_host2([$.|C0],Acc,CurF,Host,HostF) when CurF=/=0 ->
+ scan_host2(C0,[],0,[".",Acc|Host],[CurF|HostF]);
+scan_host2(C0,Acc,CurF,Host,HostF) ->
+ {C0,[Acc|Host],[CurF|HostF]}.
+
+
+%%% port = *digit
+scan_port([H|C0],Acc) when $0=<H,H=<$9 ->
+ scan_port(C0,[H|Acc]);
+scan_port(C0,Acc) ->
+ {C0,lists:reverse(Acc)}.
+
+%%% abs_path = "/" path_segments
+scan_abspath([]) ->
+ {[],[]};
+scan_abspath("/"++C0) ->
+ scan_pathsegments(C0,["/"]);
+scan_abspath(_) ->
+ {error,no_abspath}.
+
+%%% path_segments = segment *( "/" segment )
+scan_pathsegments(C0,Acc) ->
+ case scan_segment(C0,[]) of
+ {"/"++C1,Segment} ->
+ scan_pathsegments(C1,["/",Segment|Acc]);
+ {C1,Segment} ->
+ {C1,lists:reverse(lists:append([Segment|Acc]))}
+ end.
+
+
+%%% segment = *pchar *( ";" param )
+%%% param = *pchar
+scan_segment(";"++C0,Acc) ->
+ {C1,ParamAcc}=scan_pchars(C0,";"++Acc),
+ scan_segment(C1,ParamAcc);
+scan_segment(C0,Acc) ->
+ case scan_pchars(C0,Acc) of
+ {";"++C1,Segment} ->
+ {C2,ParamAcc}=scan_pchars(C1,";"++Segment),
+ scan_segment(C2,ParamAcc);
+ {C1,Segment} ->
+ {C1,Segment}
+ end.
+
+%%% query = *uric
+%%% uric = reserved | unreserved | escaped
+%%% reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
+%%% "$" | ","
+%%% unreserved = alphanum | mark
+%%% mark = "-" | "_" | "." | "!" | "~" | "*" | "'" |
+%%% "(" | ")"
+%%% escaped = "%" hex hex
+scan_query([],Acc) ->
+ lists:reverse(Acc);
+scan_query([$%,H1,H2|C0],Acc) -> % escaped
+ scan_query(C0,[hex2dec(H1)*16+hex2dec(H2)|Acc]);
+scan_query([H|C0],Acc) when $a=<H,H=<$z;$A=<H,H=<$Z;$0=<H,H=<$9 -> % alphanum
+ scan_query(C0,[H|Acc]);
+scan_query([H|C0],Acc) when H==$;; H==$/; H==$?; H==$:; H==$@;
+ H==$&; H==$=; H==$+; H==$$; H==$, -> % reserved
+ scan_query(C0,[H|Acc]);
+scan_query([H|C0],Acc) when H==$-; H==$_; H==$.; H==$!; H==$~;
+ H==$*; H==$'; H==$(; H==$) -> % mark
+ scan_query(C0,[H|Acc]);
+scan_query([H|C0],Acc) ->
+ {error,no_query}.
+
+
+%%% pchar = unreserved | escaped |
+%%% ":" | "@" | "&" | "=" | "+" | "$" | ","
+scan_pchars([],Acc) ->
+ {[],Acc};
+scan_pchars([$%,H1,H2|C0],Acc) -> % escaped
+ scan_pchars(C0,[hex2dec(H1)*16+hex2dec(H2)|Acc]);
+scan_pchars([H|C0],Acc) when $a=<H,H=<$z;$A=<H,H=<$Z;$0=<H,H=<$9 -> % alphanum
+ scan_pchars(C0,[H|Acc]);
+scan_pchars([H|C0],Acc) when H==$-; H==$_; H==$.; H==$!; H==$~;
+ H==$*; H==$'; H==$(; H==$) -> % mark
+ scan_pchars(C0,[H|Acc]);
+scan_pchars([H|C0],Acc) when H==$:; H==$@; H==$&; H==$=; H==$+; H==$$; H==$, ->
+ scan_pchars(C0,[H|Acc]);
+scan_pchars(C0,Acc) ->
+ {C0,Acc}.
+
+hex2dec(X) when X>=$0,X=<$9 -> X-$0;
+hex2dec(X) when X>=$A,X=<$F -> X-$A+10;
+hex2dec(X) when X>=$a,X=<$f -> X-$a+10.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/Makefile b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/Makefile
new file mode 100644
index 0000000000..461dc82155
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/Makefile
@@ -0,0 +1,137 @@
+# ``The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved via the world wide web at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+# Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+# AB. All Rights Reserved.''
+#
+# $Id: Makefile,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+#
+include $(ERL_TOP)/make/target.mk
+
+ifeq ($(TYPE),debug)
+ERL_COMPILE_FLAGS += -Ddebug -W
+endif
+
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+VSN=$(MNESIA_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/mnesia-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULES= \
+ mnesia \
+ mnesia_backup \
+ mnesia_bup \
+ mnesia_checkpoint \
+ mnesia_checkpoint_sup \
+ mnesia_controller \
+ mnesia_dumper\
+ mnesia_event \
+ mnesia_frag \
+ mnesia_frag_hash \
+ mnesia_frag_old_hash \
+ mnesia_index \
+ mnesia_kernel_sup \
+ mnesia_late_loader \
+ mnesia_lib\
+ mnesia_loader \
+ mnesia_locker \
+ mnesia_log \
+ mnesia_monitor \
+ mnesia_recover \
+ mnesia_registry \
+ mnesia_schema\
+ mnesia_snmp_hook \
+ mnesia_snmp_sup \
+ mnesia_subscr \
+ mnesia_sup \
+ mnesia_sp \
+ mnesia_text \
+ mnesia_tm
+
+HRL_FILES= mnesia.hrl
+
+ERL_FILES= $(MODULES:%=%.erl)
+
+TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
+
+APP_FILE= mnesia.app
+
+APP_SRC= $(APP_FILE).src
+APP_TARGET= $(EBIN)/$(APP_FILE)
+
+APPUP_FILE= mnesia.appup
+
+APPUP_SRC= $(APPUP_FILE).src
+APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
+
+
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_FLAGS +=
+ERL_COMPILE_FLAGS += \
+ +warn_unused_vars \
+ +'{parse_transform,sys_pre_attributes}' \
+ +'{attribute,insert,vsn,"mnesia_$(MNESIA_VSN)"}' \
+ -W
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+opt: $(TARGET_FILES)
+
+debug:
+ @${MAKE} TYPE=debug
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
+
+release_docs_spec:
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.app.src b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.app.src
new file mode 100644
index 0000000000..3715488ec2
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.app.src
@@ -0,0 +1,52 @@
+{application, mnesia,
+ [{description, "MNESIA CXC 138 12"},
+ {vsn, "%VSN%"},
+ {modules, [
+ mnesia,
+ mnesia_backup,
+ mnesia_bup,
+ mnesia_checkpoint,
+ mnesia_checkpoint_sup,
+ mnesia_controller,
+ mnesia_dumper,
+ mnesia_event,
+ mnesia_frag,
+ mnesia_frag_hash,
+ mnesia_frag_old_hash,
+ mnesia_index,
+ mnesia_kernel_sup,
+ mnesia_late_loader,
+ mnesia_lib,
+ mnesia_loader,
+ mnesia_locker,
+ mnesia_log,
+ mnesia_monitor,
+ mnesia_recover,
+ mnesia_registry,
+ mnesia_schema,
+ mnesia_snmp_hook,
+ mnesia_snmp_sup,
+ mnesia_subscr,
+ mnesia_sup,
+ mnesia_sp,
+ mnesia_text,
+ mnesia_tm
+ ]},
+ {registered, [
+ mnesia_dumper_load_regulator,
+ mnesia_event,
+ mnesia_fallback,
+ mnesia_controller,
+ mnesia_kernel_sup,
+ mnesia_late_loader,
+ mnesia_locker,
+ mnesia_monitor,
+ mnesia_recover,
+ mnesia_subscr,
+ mnesia_sup,
+ mnesia_tm
+ ]},
+ {applications, [kernel, stdlib]},
+ {mod, {mnesia_sup, []}}]}.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.appup.src b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.appup.src
new file mode 100644
index 0000000000..502ddb02fc
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.appup.src
@@ -0,0 +1,6 @@
+{"%VSN%",
+ [
+ ],
+ [
+ ]
+}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.erl
new file mode 100644
index 0000000000..956f4f5395
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.erl
@@ -0,0 +1,2191 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+%% This module exports the public interface of the Mnesia DBMS engine
+
+-module(mnesia).
+%-behaviour(mnesia_access).
+
+-export([
+ %% Start, stop and debugging
+ start/0, start/1, stop/0, % Not for public use
+ set_debug_level/1, lkill/0, kill/0, % Not for public use
+ ms/0, nc/0, nc/1, ni/0, ni/1, % Not for public use
+ change_config/2,
+
+ %% Activity mgt
+ abort/1, transaction/1, transaction/2, transaction/3,
+ sync_transaction/1, sync_transaction/2, sync_transaction/3,
+ async_dirty/1, async_dirty/2, sync_dirty/1, sync_dirty/2, ets/1, ets/2,
+ activity/2, activity/3, activity/4, % Not for public use
+
+ %% Access within an activity - Lock acquisition
+ lock/2, lock/4,
+ read_lock_table/1,
+ write_lock_table/1,
+
+ %% Access within an activity - Updates
+ write/1, s_write/1, write/3, write/5,
+ delete/1, s_delete/1, delete/3, delete/5,
+ delete_object/1, s_delete_object/1, delete_object/3, delete_object/5,
+
+ %% Access within an activity - Reads
+ read/1, wread/1, read/3, read/5,
+ match_object/1, match_object/3, match_object/5,
+ select/2, select/3, select/5,
+ all_keys/1, all_keys/4,
+ index_match_object/2, index_match_object/4, index_match_object/6,
+ index_read/3, index_read/6,
+
+ %% Iterators within an activity
+ foldl/3, foldl/4, foldr/3, foldr/4,
+
+ %% Dirty access regardless of activities - Updates
+ dirty_write/1, dirty_write/2,
+ dirty_delete/1, dirty_delete/2,
+ dirty_delete_object/1, dirty_delete_object/2,
+ dirty_update_counter/2, dirty_update_counter/3,
+
+ %% Dirty access regardless of activities - Read
+ dirty_read/1, dirty_read/2,
+ dirty_select/2,
+ dirty_match_object/1, dirty_match_object/2, dirty_all_keys/1,
+ dirty_index_match_object/2, dirty_index_match_object/3,
+ dirty_index_read/3, dirty_slot/2,
+ dirty_first/1, dirty_next/2, dirty_last/1, dirty_prev/2,
+
+ %% Info
+ table_info/2, table_info/4, schema/0, schema/1,
+ error_description/1, info/0, system_info/1,
+ system_info/0, % Not for public use
+
+ %% Database mgt
+ create_schema/1, delete_schema/1,
+ backup/1, backup/2, traverse_backup/4, traverse_backup/6,
+ install_fallback/1, install_fallback/2,
+ uninstall_fallback/0, uninstall_fallback/1,
+ activate_checkpoint/1, deactivate_checkpoint/1,
+ backup_checkpoint/2, backup_checkpoint/3, restore/2,
+
+ %% Table mgt
+ create_table/1, create_table/2, delete_table/1,
+ add_table_copy/3, del_table_copy/2, move_table_copy/3,
+ add_table_index/2, del_table_index/2,
+ transform_table/3, transform_table/4,
+ change_table_copy_type/3,
+ read_table_property/2, write_table_property/2, delete_table_property/2,
+ change_table_frag/2,
+ clear_table/1,
+
+ %% Table load
+ dump_tables/1, wait_for_tables/2, force_load_table/1,
+ change_table_access_mode/2, change_table_load_order/2,
+ set_master_nodes/1, set_master_nodes/2,
+
+ %% Misc admin
+ dump_log/0, subscribe/1, unsubscribe/1, report_event/1,
+
+ %% Snmp
+ snmp_open_table/2, snmp_close_table/1,
+ snmp_get_row/2, snmp_get_next_index/2, snmp_get_mnesia_key/2,
+
+ %% Textfile access
+ load_textfile/1, dump_to_textfile/1,
+
+ %% Mnemosyne exclusive
+ get_activity_id/0, put_activity_id/1, % Not for public use
+
+ %% Mnesia internal functions
+ dirty_rpc/4, % Not for public use
+ has_var/1, fun_select/7,
+ foldl/6, foldr/6,
+
+ %% Module internal callback functions
+ remote_dirty_match_object/2, % Not for public use
+ remote_dirty_select/2 % Not for public use
+ ]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [verbose/2]).
+
+-define(DEFAULT_ACCESS, ?MODULE).
+
+%% Select
+-define(PATTERN_TO_OBJECT_MATCH_SPEC(Pat), [{Pat,[],['$_']}]).
+-define(PATTERN_TO_BINDINGS_MATCH_SPEC(Pat), [{Pat,[],['$$']}]).
+
+%% Local function in order to avoid external function call
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+is_dollar_digits(Var) ->
+ case atom_to_list(Var) of
+ [$$ | Digs] ->
+ is_digits(Digs);
+ _ ->
+ false
+ end.
+
+is_digits([Dig | Tail]) ->
+ if
+ $0 =< Dig, Dig =< $9 ->
+ is_digits(Tail);
+ true ->
+ false
+ end;
+is_digits([]) ->
+ true.
+
+has_var(X) when atom(X) ->
+ if
+ X == '_' ->
+ true;
+ atom(X) ->
+ is_dollar_digits(X);
+ true ->
+ false
+ end;
+has_var(X) when tuple(X) ->
+ e_has_var(X, size(X));
+has_var([H|T]) ->
+ case has_var(H) of
+ false -> has_var(T);
+ Other -> Other
+ end;
+has_var(_) -> false.
+
+e_has_var(_, 0) -> false;
+e_has_var(X, Pos) ->
+ case has_var(element(Pos, X))of
+ false -> e_has_var(X, Pos-1);
+ Other -> Other
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Start and stop
+
+start() ->
+ {Time , Res} = timer:tc(application, start, [?APPLICATION, temporary]),
+
+ Secs = Time div 1000000,
+ case Res of
+ ok ->
+ verbose("Mnesia started, ~p seconds~n",[ Secs]),
+ ok;
+ {error, {already_started, mnesia}} ->
+ verbose("Mnesia already started, ~p seconds~n",[ Secs]),
+ ok;
+ {error, R} ->
+ verbose("Mnesia failed to start, ~p seconds: ~p~n",[ Secs, R]),
+ {error, R}
+ end.
+
+start(ExtraEnv) when list(ExtraEnv) ->
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ patched_start(ExtraEnv);
+ Error ->
+ Error
+ end;
+start(ExtraEnv) ->
+ {error, {badarg, ExtraEnv}}.
+
+patched_start([{Env, Val} | Tail]) when atom(Env) ->
+ case mnesia_monitor:patch_env(Env, Val) of
+ {error, Reason} ->
+ {error, Reason};
+ _NewVal ->
+ patched_start(Tail)
+ end;
+patched_start([Head | _]) ->
+ {error, {bad_type, Head}};
+patched_start([]) ->
+ start().
+
+stop() ->
+ case application:stop(?APPLICATION) of
+ ok -> stopped;
+ {error, {not_started, ?APPLICATION}} -> stopped;
+ Other -> Other
+ end.
+
+change_config(extra_db_nodes, Ns) when list(Ns) ->
+ mnesia_controller:connect_nodes(Ns);
+change_config(BadKey, _BadVal) ->
+ {error, {badarg, BadKey}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Debugging
+
+set_debug_level(Level) ->
+ mnesia_subscr:set_debug_level(Level).
+
+lkill() ->
+ mnesia_sup:kill().
+
+kill() ->
+ rpc:multicall(mnesia_sup, kill, []).
+
+ms() ->
+ [
+ mnesia,
+ mnesia_backup,
+ mnesia_bup,
+ mnesia_checkpoint,
+ mnesia_checkpoint_sup,
+ mnesia_controller,
+ mnesia_dumper,
+ mnesia_loader,
+ mnesia_frag,
+ mnesia_frag_hash,
+ mnesia_frag_old_hash,
+ mnesia_index,
+ mnesia_kernel_sup,
+ mnesia_late_loader,
+ mnesia_lib,
+ mnesia_log,
+ mnesia_registry,
+ mnesia_schema,
+ mnesia_snmp_hook,
+ mnesia_snmp_sup,
+ mnesia_subscr,
+ mnesia_sup,
+ mnesia_text,
+ mnesia_tm,
+ mnesia_recover,
+ mnesia_locker,
+
+ %% Keep these last in the list, so
+ %% mnesia_sup kills these last
+ mnesia_monitor,
+ mnesia_event
+ ].
+
+nc() ->
+ Mods = ms(),
+ nc(Mods).
+
+nc(Mods) when list(Mods)->
+ [Mod || Mod <- Mods, ok /= load(Mod, compile)].
+
+ni() ->
+ Mods = ms(),
+ ni(Mods).
+
+ni(Mods) when list(Mods) ->
+ [Mod || Mod <- Mods, ok /= load(Mod, interpret)].
+
+load(Mod, How) when atom(Mod) ->
+ case try_load(Mod, How) of
+ ok ->
+ ok;
+ _ ->
+ mnesia_lib:show( "~n RETRY ~p FROM: ", [Mod]),
+ Abs = mod2abs(Mod),
+ load(Abs, How)
+ end;
+load(Abs, How) ->
+ case try_load(Abs, How) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ mnesia_lib:show( " *** ERROR *** ~p~n", [Reason]),
+ {error, Reason}
+ end.
+
+try_load(Mod, How) ->
+ mnesia_lib:show( " ~p ", [Mod]),
+ Flags = [{d, debug}],
+ case How of
+ compile ->
+ case catch c:nc(Mod, Flags) of
+ {ok, _} -> ok;
+ Other -> {error, Other}
+ end;
+ interpret ->
+ case catch int:ni(Mod, Flags) of
+ {module, _} -> ok;
+ Other -> {error, Other}
+ end
+ end.
+
+mod2abs(Mod) ->
+ ModString = atom_to_list(Mod),
+ SubDir =
+ case lists:suffix("test", ModString) of
+ true -> test;
+ false -> src
+ end,
+ filename:join([code:lib_dir(?APPLICATION), SubDir, ModString]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Activity mgt
+
+abort(Reason) ->
+ exit({aborted, Reason}).
+
+transaction(Fun) ->
+ transaction(get(mnesia_activity_state), Fun, [], infinity, ?DEFAULT_ACCESS, async).
+transaction(Fun, Retries) when integer(Retries), Retries >= 0 ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, async);
+transaction(Fun, Retries) when Retries == infinity ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, async);
+transaction(Fun, Args) ->
+ transaction(get(mnesia_activity_state), Fun, Args, infinity, ?DEFAULT_ACCESS, async).
+transaction(Fun, Args, Retries) ->
+ transaction(get(mnesia_activity_state), Fun, Args, Retries, ?DEFAULT_ACCESS, async).
+
+sync_transaction(Fun) ->
+ transaction(get(mnesia_activity_state), Fun, [], infinity, ?DEFAULT_ACCESS, sync).
+sync_transaction(Fun, Retries) when integer(Retries), Retries >= 0 ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, sync);
+sync_transaction(Fun, Retries) when Retries == infinity ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, sync);
+sync_transaction(Fun, Args) ->
+ transaction(get(mnesia_activity_state), Fun, Args, infinity, ?DEFAULT_ACCESS, sync).
+sync_transaction(Fun, Args, Retries) ->
+ transaction(get(mnesia_activity_state), Fun, Args, Retries, ?DEFAULT_ACCESS, sync).
+
+
+transaction(State, Fun, Args, Retries, Mod, Kind)
+ when function(Fun), list(Args), Retries == infinity, atom(Mod) ->
+ mnesia_tm:transaction(State, Fun, Args, Retries, Mod, Kind);
+transaction(State, Fun, Args, Retries, Mod, Kind)
+ when function(Fun), list(Args), integer(Retries), Retries >= 0, atom(Mod) ->
+ mnesia_tm:transaction(State, Fun, Args, Retries, Mod, Kind);
+transaction(_State, Fun, Args, Retries, Mod, _Kind) ->
+ {aborted, {badarg, Fun, Args, Retries, Mod}}.
+
+non_transaction(State, Fun, Args, ActivityKind, Mod)
+ when function(Fun), list(Args), atom(Mod) ->
+ mnesia_tm:non_transaction(State, Fun, Args, ActivityKind, Mod);
+non_transaction(_State, Fun, Args, _ActivityKind, _Mod) ->
+ {aborted, {badarg, Fun, Args}}.
+
+async_dirty(Fun) ->
+ async_dirty(Fun, []).
+async_dirty(Fun, Args) ->
+ non_transaction(get(mnesia_activity_state), Fun, Args, async_dirty, ?DEFAULT_ACCESS).
+
+sync_dirty(Fun) ->
+ sync_dirty(Fun, []).
+sync_dirty(Fun, Args) ->
+ non_transaction(get(mnesia_activity_state), Fun, Args, sync_dirty, ?DEFAULT_ACCESS).
+
+ets(Fun) ->
+ ets(Fun, []).
+ets(Fun, Args) ->
+ non_transaction(get(mnesia_activity_state), Fun, Args, ets, ?DEFAULT_ACCESS).
+
+activity(Kind, Fun) ->
+ activity(Kind, Fun, []).
+activity(Kind, Fun, Args) when list(Args) ->
+ activity(Kind, Fun, Args, mnesia_monitor:get_env(access_module));
+activity(Kind, Fun, Mod) ->
+ activity(Kind, Fun, [], Mod).
+
+activity(Kind, Fun, Args, Mod) ->
+ State = get(mnesia_activity_state),
+ case Kind of
+ ets -> non_transaction(State, Fun, Args, Kind, Mod);
+ async_dirty -> non_transaction(State, Fun, Args, Kind, Mod);
+ sync_dirty -> non_transaction(State, Fun, Args, Kind, Mod);
+ transaction -> wrap_trans(State, Fun, Args, infinity, Mod, async);
+ {transaction, Retries} -> wrap_trans(State, Fun, Args, Retries, Mod, async);
+ sync_transaction -> wrap_trans(State, Fun, Args, infinity, Mod, sync);
+ {sync_transaction, Retries} -> wrap_trans(State, Fun, Args, Retries, Mod, sync);
+ _ -> {aborted, {bad_type, Kind}}
+ end.
+
+wrap_trans(State, Fun, Args, Retries, Mod, Kind) ->
+ case transaction(State, Fun, Args, Retries, Mod, Kind) of
+ {'atomic', GoodRes} -> GoodRes;
+ BadRes -> exit(BadRes)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access within an activity - lock acquisition
+
+%% Grab a lock on an item in the global lock table
+%% Item may be any term. Lock may be write or read.
+%% write lock is set on all the given nodes
+%% read lock is only set on the first node
+%% Nodes may either be a list of nodes or one node as an atom
+%% Mnesia on all Nodes must be connected to each other, but
+%% it is not necessary that they are up and running.
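+%%
+%% A hedged usage sketch (illustrative; assumes a 'person' table exists):
+%%   mnesia:transaction(fun() ->
+%%           mnesia:lock({table, person}, write),
+%%           mnesia:write({person, 1, "Alice"})
+%%       end).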
+
+lock(LockItem, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ lock(Tid, Ts, LockItem, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:lock(Tid, Ts, LockItem, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+lock(Tid, Ts, LockItem, LockKind) ->
+ case element(1, Tid) of
+ tid ->
+ case LockItem of
+ {record, Tab, Key} ->
+ lock_record(Tid, Ts, Tab, Key, LockKind);
+ {table, Tab} ->
+ lock_table(Tid, Ts, Tab, LockKind);
+ {global, GlobalKey, Nodes} ->
+ global_lock(Tid, Ts, GlobalKey, LockKind, Nodes);
+ _ ->
+ abort({bad_type, LockItem})
+ end;
+ _Protocol ->
+ []
+ end.
+
+%% Grab a read lock on a whole table
+read_lock_table(Tab) ->
+ lock({table, Tab}, read),
+ ok.
+
+%% Grab a write lock on a whole table
+write_lock_table(Tab) ->
+ lock({table, Tab}, write),
+ ok.
+
+lock_record(Tid, Ts, Tab, Key, LockKind) when atom(Tab) ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, Key},
+ case LockKind of
+ read ->
+ mnesia_locker:rlock(Tid, Store, Oid);
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ none ->
+ [];
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+lock_record(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+lock_table(Tid, Ts, Tab, LockKind) when atom(Tab) ->
+ Store = Ts#tidstore.store,
+ case LockKind of
+ read ->
+ mnesia_locker:rlock_table(Tid, Store, Tab);
+ write ->
+ mnesia_locker:wlock_table(Tid, Store, Tab);
+ sticky_write ->
+ mnesia_locker:sticky_wlock_table(Tid, Store, Tab);
+ none ->
+ [];
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+lock_table(_Tid, _Ts, Tab, _LockKind) ->
+ abort({bad_type, Tab}).
+
+global_lock(Tid, Ts, Item, Kind, Nodes) when list(Nodes) ->
+ case element(1, Tid) of
+ tid ->
+ Store = Ts#tidstore.store,
+ GoodNs = good_global_nodes(Nodes),
+ if
+ Kind /= read, Kind /= write ->
+ abort({bad_type, Kind});
+ true ->
+ mnesia_locker:global_lock(Tid, Store, Item, Kind, GoodNs)
+ end;
+ _Protocol ->
+ []
+ end;
+global_lock(_Tid, _Ts, _Item, _Kind, Nodes) ->
+ abort({bad_type, Nodes}).
+
+good_global_nodes(Nodes) ->
+ Recover = [node() | val(recover_nodes)],
+ mnesia_lib:intersect(Nodes, Recover).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access within an activity - updates
+
+write(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ write(Tab, Val, write);
+write(Val) ->
+ abort({bad_type, Val}).
+
+s_write(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ write(Tab, Val, sticky_write).
+
+write(Tab, Val, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ write(Tid, Ts, Tab, Val, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:write(Tid, Ts, Tab, Val, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+write(Tid, Ts, Tab, Val, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_insert(Tab, Val),
+ ok;
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, element(2, Val)},
+ case LockKind of
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ write_to_store(Tab, Store, Oid, Val);
+ Protocol ->
+ do_dirty_write(Protocol, Tab, Val)
+ end;
+write(_Tid, _Ts, Tab, Val, LockKind) ->
+ abort({bad_type, Tab, Val, LockKind}).
+
+write_to_store(Tab, Store, Oid, Val) ->
+ case ?catch_val({Tab, record_validation}) of
+ {RecName, Arity, Type}
+ when size(Val) == Arity, RecName == element(1, Val) ->
+ case Type of
+ bag ->
+ ?ets_insert(Store, {Oid, Val, write});
+ _ ->
+ ?ets_delete(Store, Oid),
+ ?ets_insert(Store, {Oid, Val, write})
+ end,
+ ok;
+ {'EXIT', _} ->
+ abort({no_exists, Tab});
+ _ ->
+ abort({bad_type, Val})
+ end.
+
+delete({Tab, Key}) ->
+ delete(Tab, Key, write);
+delete(Oid) ->
+ abort({bad_type, Oid}).
+
+s_delete({Tab, Key}) ->
+ delete(Tab, Key, sticky_write);
+s_delete(Oid) ->
+ abort({bad_type, Oid}).
+
+delete(Tab, Key, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ delete(Tid, Ts, Tab, Key, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:delete(Tid, Ts, Tab, Key, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+delete(Tid, Ts, Tab, Key, LockKind)
+ when atom(Tab), Tab /= schema ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_delete(Tab, Key),
+ ok;
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, Key},
+ case LockKind of
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ ?ets_delete(Store, Oid),
+ ?ets_insert(Store, {Oid, Oid, delete}),
+ ok;
+ Protocol ->
+ do_dirty_delete(Protocol, Tab, Key)
+ end;
+delete(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+delete_object(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ delete_object(Tab, Val, write);
+delete_object(Val) ->
+ abort({bad_type, Val}).
+
+s_delete_object(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ delete_object(Tab, Val, sticky_write);
+s_delete_object(Val) ->
+ abort({bad_type, Val}).
+
+delete_object(Tab, Val, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ delete_object(Tid, Ts, Tab, Val, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:delete_object(Tid, Ts, Tab, Val, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+delete_object(Tid, Ts, Tab, Val, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_match_delete(Tab, Val),
+ ok;
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, element(2, Val)},
+ case LockKind of
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ case val({Tab, setorbag}) of
+ bag ->
+ ?ets_match_delete(Store, {Oid, Val, '_'}),
+ ?ets_insert(Store, {Oid, Val, delete_object});
+ _ ->
+ case ?ets_match_object(Store, {Oid, '_', write}) of
+ [] ->
+ ?ets_match_delete(Store, {Oid, Val, '_'}),
+ ?ets_insert(Store, {Oid, Val, delete_object});
+ _ ->
+ ?ets_delete(Store, Oid),
+ ?ets_insert(Store, {Oid, Oid, delete})
+ end
+ end,
+ ok;
+ Protocol ->
+ do_dirty_delete_object(Protocol, Tab, Val)
+ end;
+delete_object(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access within an activity - read
+
+read({Tab, Key}) ->
+ read(Tab, Key, read);
+read(Oid) ->
+ abort({bad_type, Oid}).
+
+wread({Tab, Key}) ->
+ read(Tab, Key, write);
+wread(Oid) ->
+ abort({bad_type, Oid}).
+
+read(Tab, Key, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ read(Tid, Ts, Tab, Key, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:read(Tid, Ts, Tab, Key, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+read(Tid, Ts, Tab, Key, LockKind)
+ when atom(Tab), Tab /= schema ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_lookup(Tab, Key);
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, Key},
+ Objs =
+ case LockKind of
+ read ->
+ mnesia_locker:rlock(Tid, Store, Oid);
+ write ->
+ mnesia_locker:rwlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_rwlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ add_written(?ets_lookup(Store, Oid), Tab, Objs);
+ _Protocol ->
+ dirty_read(Tab, Key)
+ end;
+read(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%
+%% Iterators
+
+foldl(Fun, Acc, Tab) ->
+ foldl(Fun, Acc, Tab, read).
+
+foldl(Fun, Acc, Tab, LockKind) when function(Fun) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ foldl(Tid, Ts, Fun, Acc, Tab, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:foldl(Tid, Ts, Fun, Acc, Tab, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+foldl(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ {Type, Prev} = init_iteration(ActivityId, Opaque, Tab, LockKind),
+ Res = (catch do_foldl(ActivityId, Opaque, Tab, dirty_first(Tab), Fun, Acc, Type, Prev)),
+ close_iteration(Res, Tab).
+
+do_foldl(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) ->
+ lists:foldl(fun(Key, Acc) ->
+ lists:foldl(Fun, Acc, read(A, O, Tab, Key, read))
+ end, RAcc, Stored);
+do_foldl(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H == Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldl(A, O, Tab, dirty_next(Tab, Key), Fun, NewAcc, ordered_set, Stored);
+do_foldl(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H < Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, H, read)),
+ do_foldl(A, O, Tab, Key, Fun, NewAcc, ordered_set, Stored);
+do_foldl(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H > Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldl(A, O, Tab, dirty_next(Tab, Key), Fun, NewAcc, ordered_set, [H |Stored]);
+do_foldl(A, O, Tab, Key, Fun, Acc, Type, Stored) -> %% Type is set or bag
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ NewStored = ordsets:del_element(Key, Stored),
+ do_foldl(A, O, Tab, dirty_next(Tab, Key), Fun, NewAcc, Type, NewStored).
+
+foldr(Fun, Acc, Tab) ->
+ foldr(Fun, Acc, Tab, read).
+foldr(Fun, Acc, Tab, LockKind) when function(Fun) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ foldr(Tid, Ts, Fun, Acc, Tab, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:foldr(Tid, Ts, Fun, Acc, Tab, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+foldr(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ {Type, TempPrev} = init_iteration(ActivityId, Opaque, Tab, LockKind),
+ Prev =
+ if
+ Type == ordered_set ->
+ lists:reverse(TempPrev);
+ true -> %% Order doesn't matter for set and bag
+ TempPrev %% Keep the order so we can use ordsets:del_element
+ end,
+ Res = (catch do_foldr(ActivityId, Opaque, Tab, dirty_last(Tab), Fun, Acc, Type, Prev)),
+ close_iteration(Res, Tab).
+
+do_foldr(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) ->
+ lists:foldl(fun(Key, Acc) ->
+ lists:foldl(Fun, Acc, read(A, O, Tab, Key, read))
+ end, RAcc, Stored);
+do_foldr(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H == Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldr(A, O, Tab, dirty_prev(Tab, Key), Fun, NewAcc, ordered_set, Stored);
+do_foldr(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H > Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, H, read)),
+ do_foldr(A, O, Tab, Key, Fun, NewAcc, ordered_set, Stored);
+do_foldr(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H < Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldr(A, O, Tab, dirty_prev(Tab, Key), Fun, NewAcc, ordered_set, [H |Stored]);
+do_foldr(A, O, Tab, Key, Fun, Acc, Type, Stored) -> %% Type is set or bag
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ NewStored = ordsets:del_element(Key, Stored),
+ do_foldr(A, O, Tab, dirty_prev(Tab, Key), Fun, NewAcc, Type, NewStored).
+
+init_iteration(ActivityId, Opaque, Tab, LockKind) ->
+ lock(ActivityId, Opaque, {table, Tab}, LockKind),
+ Type = val({Tab, setorbag}),
+ Previous = add_previous(ActivityId, Opaque, Type, Tab),
+ St = val({Tab, storage_type}),
+ if
+ St == unknown ->
+ ignore;
+ true ->
+ mnesia_lib:db_fixtable(St, Tab, true)
+ end,
+ {Type, Previous}.
+
+close_iteration(Res, Tab) ->
+ case val({Tab, storage_type}) of
+ unknown ->
+ ignore;
+ St ->
+ mnesia_lib:db_fixtable(St, Tab, false)
+ end,
+ case Res of
+ {'EXIT', {aborted, What}} ->
+ abort(What);
+ {'EXIT', What} ->
+ abort(What);
+ _ ->
+ Res
+ end.
+
+add_previous(_ActivityId, non_transaction, _Type, _Tab) ->
+ [];
+add_previous(_Tid, Ts, _Type, Tab) ->
+ Previous = ?ets_match(Ts#tidstore.store, {{Tab, '$1'}, '_', write}),
+ lists:sort(lists:concat(Previous)).
+
+%% This routine fixes up the return value from read/1 so that
+%% it is correct with respect to what this particular transaction
+%% has already written, deleted, and so on.
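+%%
+%% For example (illustrative): inside a transaction that has just called
+%% mnesia:write({person, 1, "Alice"}), a subsequent mnesia:read({person, 1})
+%% returns [{person, 1, "Alice"}] even though nothing has been committed yet.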
+
+add_written([], _Tab, Objs) ->
+ Objs; % standard normal fast case
+add_written(Written, Tab, Objs) ->
+ case val({Tab, setorbag}) of
+ bag ->
+ add_written_to_bag(Written, Objs, []);
+ _ ->
+ add_written_to_set(Written)
+ end.
+
+add_written_to_set(Ws) ->
+ case lists:last(Ws) of
+ {_, _, delete} -> [];
+ {_, Val, write} -> [Val];
+ {_, _, delete_object} -> []
+ end.
+
+add_written_to_bag([{_, Val, write} | Tail], Objs, Ack) ->
+ add_written_to_bag(Tail, lists:delete(Val, Objs), [Val | Ack]);
+add_written_to_bag([], Objs, Ack) ->
+ Objs ++ lists:reverse(Ack); %% Oldest write first as in ets
+add_written_to_bag([{_, _ , delete} | Tail], _Objs, _Ack) ->
+ %% This transaction just deleted all objects
+ %% with this key
+ add_written_to_bag(Tail, [], []);
+add_written_to_bag([{_, Val, delete_object} | Tail], Objs, Ack) ->
+ add_written_to_bag(Tail, lists:delete(Val, Objs), lists:delete(Val, Ack)).
+
+match_object(Pat) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ match_object(Tab, Pat, read);
+match_object(Pat) ->
+ abort({bad_type, Pat}).
+
+match_object(Tab, Pat, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ match_object(Tid, Ts, Tab, Pat, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:match_object(Tid, Ts, Tab, Pat, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+match_object(Tid, Ts, Tab, Pat, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ mnesia_lib:db_match_object(ram_copies, Tab, Pat);
+ tid ->
+ Key = element(2, Pat),
+ case has_var(Key) of
+ false -> lock_record(Tid, Ts, Tab, Key, LockKind);
+ true -> lock_table(Tid, Ts, Tab, LockKind)
+ end,
+ Objs = dirty_match_object(Tab, Pat),
+ add_written_match(Ts#tidstore.store, Pat, Tab, Objs);
+ _Protocol ->
+ dirty_match_object(Tab, Pat)
+ end;
+match_object(_Tid, _Ts, Tab, Pat, _LockKind) ->
+ abort({bad_type, Tab, Pat}).
+
+add_written_match(S, Pat, Tab, Objs) ->
+ Ops = find_ops(S, Tab, Pat),
+ add_match(Ops, Objs, val({Tab, setorbag})).
+
+find_ops(S, Tab, Pat) ->
+ GetWritten = [{{{Tab, '_'}, Pat, write}, [], ['$_']},
+ {{{Tab, '_'}, '_', delete}, [], ['$_']},
+ {{{Tab, '_'}, Pat, delete_object}, [], ['$_']}],
+ ets:select(S, GetWritten).
+
+add_match([], Objs, _Type) ->
+ Objs;
+add_match(Written, Objs, ordered_set) ->
+ %% Must use keysort which is stable
+ add_ordered_match(lists:keysort(1,Written), Objs, []);
+add_match([{Oid, _, delete}|R], Objs, Type) ->
+ add_match(R, deloid(Oid, Objs), Type);
+add_match([{_Oid, Val, delete_object}|R], Objs, Type) ->
+ add_match(R, lists:delete(Val, Objs), Type);
+add_match([{_Oid, Val, write}|R], Objs, bag) ->
+ add_match(R, [Val | lists:delete(Val, Objs)], bag);
+add_match([{Oid, Val, write}|R], Objs, set) ->
+ add_match(R, [Val | deloid(Oid,Objs)],set).
+
+%% For ordered_set only !!
+add_ordered_match(Written = [{{_, Key}, _, _}|_], [Obj|Objs], Acc)
+ when Key > element(2, Obj) ->
+ add_ordered_match(Written, Objs, [Obj|Acc]);
+add_ordered_match([{{_, Key}, Val, write}|Rest], Objs =[Obj|_], Acc)
+ when Key < element(2, Obj) ->
+ add_ordered_match(Rest, [Val|Objs],Acc);
+add_ordered_match([{{_, Key}, _, _DelOP}|Rest], Objs =[Obj|_], Acc)
+ when Key < element(2, Obj) ->
+ add_ordered_match(Rest,Objs,Acc);
+%% Greater than last object
+add_ordered_match([{_, Val, write}|Rest], [], Acc) ->
+ add_ordered_match(Rest, [Val], Acc);
+add_ordered_match([_|Rest], [], Acc) ->
+ add_ordered_match(Rest, [], Acc);
+%% Keys are equal from here
+add_ordered_match([{_, Val, write}|Rest], [_Obj|Objs], Acc) ->
+ add_ordered_match(Rest, [Val|Objs], Acc);
+add_ordered_match([{_, _Val, delete}|Rest], [_Obj|Objs], Acc) ->
+ add_ordered_match(Rest, Objs, Acc);
+add_ordered_match([{_, Val, delete_object}|Rest], [Val|Objs], Acc) ->
+ add_ordered_match(Rest, Objs, Acc);
+add_ordered_match([{_, _, delete_object}|Rest], Objs, Acc) ->
+ add_ordered_match(Rest, Objs, Acc);
+add_ordered_match([], Objs, Acc) ->
+ lists:reverse(Acc, Objs).
+
+
+%%%%%%%%%%%%%%%%%%
+% select
+
+select(Tab, Pat) ->
+ select(Tab, Pat, read).
+select(Tab, Pat, LockKind)
+ when atom(Tab), Tab /= schema, list(Pat) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ select(Tid, Ts, Tab, Pat, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:select(Tid, Ts, Tab, Pat, LockKind);
+ _ ->
+ abort(no_transaction)
+ end;
+select(Tab, Pat, _Lock) ->
+ abort({badarg, Tab, Pat}).
+
+select(Tid, Ts, Tab, Spec, LockKind) ->
+ SelectFun = fun(FixedSpec) -> dirty_select(Tab, FixedSpec) end,
+ fun_select(Tid, Ts, Tab, Spec, LockKind, Tab, SelectFun).
+
+fun_select(Tid, Ts, Tab, Spec, LockKind, TabPat, SelectFun) ->
+ case element(1, Tid) of
+ ets ->
+ mnesia_lib:db_select(ram_copies, Tab, Spec);
+ tid ->
+ Store = Ts#tidstore.store,
+ Written = ?ets_match_object(Store, {{TabPat, '_'}, '_', '_'}),
+ %% Avoid table lock if possible
+ case Spec of
+ [{HeadPat,_, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ Key = element(2, HeadPat),
+ case has_var(Key) of
+ false -> lock_record(Tid, Ts, Tab, Key, LockKind);
+ true -> lock_table(Tid, Ts, Tab, LockKind)
+ end;
+ _ ->
+ lock_table(Tid, Ts, Tab, LockKind)
+ end,
+ case Written of
+ [] ->
+ %% Nothing changed in the table during this transaction,
+ %% Simple case get results from [d]ets
+ SelectFun(Spec);
+ _ ->
+ %% Hard (slow case) records added or deleted earlier
+ %% in the transaction, have to cope with that.
+ Type = val({Tab, setorbag}),
+ FixedSpec = get_record_pattern(Spec),
+ TabRecs = SelectFun(FixedSpec),
+ FixedRes = add_match(Written, TabRecs, Type),
+ CMS = ets:match_spec_compile(Spec),
+% case Type of
+% ordered_set ->
+% ets:match_spec_run(lists:sort(FixedRes), CMS);
+% _ ->
+% ets:match_spec_run(FixedRes, CMS)
+% end
+ ets:match_spec_run(FixedRes, CMS)
+ end;
+ _Protocol ->
+ SelectFun(Spec)
+ end.
+
+get_record_pattern([]) ->
+ [];
+get_record_pattern([{M,C,_B}|R]) ->
+ [{M,C,['$_']} | get_record_pattern(R)].
+
+deloid(_Oid, []) ->
+ [];
+deloid({Tab, Key}, [H | T]) when element(2, H) == Key ->
+ deloid({Tab, Key}, T);
+deloid(Oid, [H | T]) ->
+ [H | deloid(Oid, T)].
+
+all_keys(Tab) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ all_keys(Tid, Ts, Tab, read);
+ {Mod, Tid, Ts} ->
+ Mod:all_keys(Tid, Ts, Tab, read);
+ _ ->
+ abort(no_transaction)
+ end.
+
+all_keys(Tid, Ts, Tab, LockKind)
+ when atom(Tab), Tab /= schema ->
+ Pat0 = val({Tab, wild_pattern}),
+ Pat = setelement(2, Pat0, '$1'),
+ Keys = select(Tid, Ts, Tab, [{Pat, [], ['$1']}], LockKind),
+ case val({Tab, setorbag}) of
+ bag ->
+ mnesia_lib:uniq(Keys);
+ _ ->
+ Keys
+ end;
+all_keys(_Tid, _Ts, Tab, _LockKind) ->
+ abort({bad_type, Tab}).
+
+index_match_object(Pat, Attr) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ index_match_object(Tab, Pat, Attr, read);
+index_match_object(Pat, _Attr) ->
+ abort({bad_type, Pat}).
+
+index_match_object(Tab, Pat, Attr, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ index_match_object(Tid, Ts, Tab, Pat, Attr, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:index_match_object(Tid, Ts, Tab, Pat, Attr, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+index_match_object(Tid, Ts, Tab, Pat, Attr, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ dirty_index_match_object(Tab, Pat, Attr); % Should be optimized?
+ tid ->
+ case mnesia_schema:attr_tab_to_pos(Tab, Attr) of
+ Pos when Pos =< size(Pat) ->
+ case LockKind of
+ read ->
+ Store = Ts#tidstore.store,
+ mnesia_locker:rlock_table(Tid, Store, Tab),
+ Objs = dirty_index_match_object(Tab, Pat, Attr),
+ add_written_match(Store, Pat, Tab, Objs);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+ BadPos ->
+ abort({bad_type, Tab, BadPos})
+ end;
+ _Protocol ->
+ dirty_index_match_object(Tab, Pat, Attr)
+ end;
+index_match_object(_Tid, _Ts, Tab, Pat, _Attr, _LockKind) ->
+ abort({bad_type, Tab, Pat}).
+
+index_read(Tab, Key, Attr) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ index_read(Tid, Ts, Tab, Key, Attr, read);
+ {Mod, Tid, Ts} ->
+ Mod:index_read(Tid, Ts, Tab, Key, Attr, read);
+ _ ->
+ abort(no_transaction)
+ end.
+
+index_read(Tid, Ts, Tab, Key, Attr, LockKind)
+ when atom(Tab), Tab /= schema ->
+ case element(1, Tid) of
+ ets ->
+ dirty_index_read(Tab, Key, Attr); % Should be optimized?
+ tid ->
+ Pos = mnesia_schema:attr_tab_to_pos(Tab, Attr),
+ case LockKind of
+ read ->
+ case has_var(Key) of
+ false ->
+ Store = Ts#tidstore.store,
+ Objs = mnesia_index:read(Tid, Store, Tab, Key, Pos),
+ Pat = setelement(Pos, val({Tab, wild_pattern}), Key),
+ add_written_match(Store, Pat, Tab, Objs);
+ true ->
+ abort({bad_type, Tab, Attr, Key})
+ end;
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+ _Protocol ->
+ dirty_index_read(Tab, Key, Attr)
+ end;
+index_read(_Tid, _Ts, Tab, _Key, _Attr, _LockKind) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Dirty access regardless of activities - updates
+
+dirty_write(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ dirty_write(Tab, Val);
+dirty_write(Val) ->
+ abort({bad_type, Val}).
+
+dirty_write(Tab, Val) ->
+ do_dirty_write(async_dirty, Tab, Val).
+
+do_dirty_write(SyncMode, Tab, Val)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ case ?catch_val({Tab, record_validation}) of
+ {RecName, Arity, _Type}
+ when size(Val) == Arity, RecName == element(1, Val) ->
+ Oid = {Tab, element(2, Val)},
+ mnesia_tm:dirty(SyncMode, {Oid, Val, write});
+ {'EXIT', _} ->
+ abort({no_exists, Tab});
+ _ ->
+ abort({bad_type, Val})
+ end;
+do_dirty_write(_SyncMode, Tab, Val) ->
+ abort({bad_type, Tab, Val}).
+
+dirty_delete({Tab, Key}) ->
+ dirty_delete(Tab, Key);
+dirty_delete(Oid) ->
+ abort({bad_type, Oid}).
+
+dirty_delete(Tab, Key) ->
+ do_dirty_delete(async_dirty, Tab, Key).
+
+do_dirty_delete(SyncMode, Tab, Key) when atom(Tab), Tab /= schema ->
+ Oid = {Tab, Key},
+ mnesia_tm:dirty(SyncMode, {Oid, Oid, delete});
+do_dirty_delete(_SyncMode, Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+dirty_delete_object(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ dirty_delete_object(Tab, Val);
+dirty_delete_object(Val) ->
+ abort({bad_type, Val}).
+
+dirty_delete_object(Tab, Val) ->
+ do_dirty_delete_object(async_dirty, Tab, Val).
+
+do_dirty_delete_object(SyncMode, Tab, Val)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ Oid = {Tab, element(2, Val)},
+ mnesia_tm:dirty(SyncMode, {Oid, Val, delete_object});
+do_dirty_delete_object(_SyncMode, Tab, Val) ->
+ abort({bad_type, Tab, Val}).
+
+%% A Counter is an Oid being {CounterTab, CounterName}
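+%%
+%% A minimal usage sketch (hit_counters and page_hits are hypothetical
+%% names; the table is assumed to be a set of 3-tuples
+%% {hit_counters, Name, Count}, which is what the record_validation
+%% check below expects):
+%%
+%%   mnesia:dirty_update_counter({hit_counters, page_hits}, 1)
+%%
+%% bumps the counter stored under the key page_hits by one.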
+
+dirty_update_counter({Tab, Key}, Incr) ->
+ dirty_update_counter(Tab, Key, Incr);
+dirty_update_counter(Counter, _Incr) ->
+ abort({bad_type, Counter}).
+
+dirty_update_counter(Tab, Key, Incr) ->
+ do_dirty_update_counter(async_dirty, Tab, Key, Incr).
+
+do_dirty_update_counter(SyncMode, Tab, Key, Incr)
+ when atom(Tab), Tab /= schema, integer(Incr) ->
+ case ?catch_val({Tab, record_validation}) of
+ {RecName, 3, set} ->
+ Oid = {Tab, Key},
+ mnesia_tm:dirty(SyncMode, {Oid, {RecName, Incr}, update_counter});
+ _ ->
+ abort({combine_error, Tab, update_counter})
+ end;
+do_dirty_update_counter(_SyncMode, Tab, _Key, Incr) ->
+ abort({bad_type, Tab, Incr}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Dirty access regardless of activities - read
+
+dirty_read({Tab, Key}) ->
+ dirty_read(Tab, Key);
+dirty_read(Oid) ->
+ abort({bad_type, Oid}).
+
+dirty_read(Tab, Key)
+ when atom(Tab), Tab /= schema ->
+%% case catch ?ets_lookup(Tab, Key) of
+%% {'EXIT', _} ->
+ %% Bad luck, we have to perform a real lookup
+ dirty_rpc(Tab, mnesia_lib, db_get, [Tab, Key]);
+%% Val ->
+%% Val
+%% end;
+dirty_read(Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+dirty_match_object(Pat) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ dirty_match_object(Tab, Pat);
+dirty_match_object(Pat) ->
+ abort({bad_type, Pat}).
+
+dirty_match_object(Tab, Pat)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ dirty_rpc(Tab, ?MODULE, remote_dirty_match_object, [Tab, Pat]);
+dirty_match_object(Tab, Pat) ->
+ abort({bad_type, Tab, Pat}).
+
+remote_dirty_match_object(Tab, Pat) ->
+ Key = element(2, Pat),
+ case has_var(Key) of
+ false ->
+ mnesia_lib:db_match_object(Tab, Pat);
+ true ->
+ PosList = val({Tab, index}),
+ remote_dirty_match_object(Tab, Pat, PosList)
+ end.
+
+remote_dirty_match_object(Tab, Pat, [Pos | Tail]) when Pos =< size(Pat) ->
+ IxKey = element(Pos, Pat),
+ case has_var(IxKey) of
+ false ->
+ mnesia_index:dirty_match_object(Tab, Pat, Pos);
+ true ->
+ remote_dirty_match_object(Tab, Pat, Tail)
+ end;
+remote_dirty_match_object(Tab, Pat, []) ->
+ mnesia_lib:db_match_object(Tab, Pat);
+remote_dirty_match_object(Tab, Pat, _PosList) ->
+ abort({bad_type, Tab, Pat}).
+
+dirty_select(Tab, Spec) when atom(Tab), Tab /= schema, list(Spec) ->
+ dirty_rpc(Tab, ?MODULE, remote_dirty_select, [Tab, Spec]);
+dirty_select(Tab, Spec) ->
+ abort({bad_type, Tab, Spec}).
+
+remote_dirty_select(Tab, Spec) ->
+ case Spec of
+ [{HeadPat, _, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ Key = element(2, HeadPat),
+ case has_var(Key) of
+ false ->
+ mnesia_lib:db_select(Tab, Spec);
+ true ->
+ PosList = val({Tab, index}),
+ remote_dirty_select(Tab, Spec, PosList)
+ end;
+ _ ->
+ mnesia_lib:db_select(Tab, Spec)
+ end.
+
+remote_dirty_select(Tab, [{HeadPat,_, _}] = Spec, [Pos | Tail])
+ when tuple(HeadPat), size(HeadPat) > 2, Pos =< size(Spec) ->
+ Key = element(Pos, HeadPat),
+ case has_var(Key) of
+ false ->
+ Recs = mnesia_index:dirty_select(Tab, Spec, Pos),
+ %% Returns the records without applying the match spec
+ %% The actual filtering is handled by the caller
+ CMS = ets:match_spec_compile(Spec),
+ case val({Tab, setorbag}) of
+ ordered_set ->
+ ets:match_spec_run(lists:sort(Recs), CMS);
+ _ ->
+ ets:match_spec_run(Recs, CMS)
+ end;
+ true ->
+ remote_dirty_select(Tab, Spec, Tail)
+ end;
+remote_dirty_select(Tab, Spec, _) ->
+ mnesia_lib:db_select(Tab, Spec).
+
+dirty_all_keys(Tab) when atom(Tab), Tab /= schema ->
+ case ?catch_val({Tab, wild_pattern}) of
+ {'EXIT', _} ->
+ abort({no_exists, Tab});
+ Pat0 ->
+ Pat = setelement(2, Pat0, '$1'),
+ Keys = dirty_select(Tab, [{Pat, [], ['$1']}]),
+ case val({Tab, setorbag}) of
+ bag -> mnesia_lib:uniq(Keys);
+ _ -> Keys
+ end
+ end;
+dirty_all_keys(Tab) ->
+ abort({bad_type, Tab}).
+
+dirty_index_match_object(Pat, Attr) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ dirty_index_match_object(Tab, Pat, Attr);
+dirty_index_match_object(Pat, _Attr) ->
+ abort({bad_type, Pat}).
+
+dirty_index_match_object(Tab, Pat, Attr)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ case mnesia_schema:attr_tab_to_pos(Tab, Attr) of
+ Pos when Pos =< size(Pat) ->
+ case has_var(element(2, Pat)) of
+ false ->
+ dirty_match_object(Tab, Pat);
+ true ->
+ Elem = element(Pos, Pat),
+ case has_var(Elem) of
+ false ->
+ dirty_rpc(Tab, mnesia_index, dirty_match_object,
+ [Tab, Pat, Pos]);
+ true ->
+ abort({bad_type, Tab, Attr, Elem})
+ end
+ end;
+ BadPos ->
+ abort({bad_type, Tab, BadPos})
+ end;
+dirty_index_match_object(Tab, Pat, _Attr) ->
+ abort({bad_type, Tab, Pat}).
+
+dirty_index_read(Tab, Key, Attr) when atom(Tab), Tab /= schema ->
+ Pos = mnesia_schema:attr_tab_to_pos(Tab, Attr),
+ case has_var(Key) of
+ false ->
+ mnesia_index:dirty_read(Tab, Key, Pos);
+ true ->
+ abort({bad_type, Tab, Attr, Key})
+ end;
+dirty_index_read(Tab, _Key, _Attr) ->
+ abort({bad_type, Tab}).
+
+dirty_slot(Tab, Slot) when atom(Tab), Tab /= schema, integer(Slot) ->
+ dirty_rpc(Tab, mnesia_lib, db_slot, [Tab, Slot]);
+dirty_slot(Tab, Slot) ->
+ abort({bad_type, Tab, Slot}).
+
+dirty_first(Tab) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_first, [Tab]);
+dirty_first(Tab) ->
+ abort({bad_type, Tab}).
+
+dirty_last(Tab) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_last, [Tab]);
+dirty_last(Tab) ->
+ abort({bad_type, Tab}).
+
+dirty_next(Tab, Key) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_next_key, [Tab, Key]);
+dirty_next(Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+dirty_prev(Tab, Key) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_prev_key, [Tab, Key]);
+dirty_prev(Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+
+dirty_rpc(Tab, M, F, Args) ->
+ Node = val({Tab, where_to_read}),
+ do_dirty_rpc(Tab, Node, M, F, Args).
+
+do_dirty_rpc(_Tab, nowhere, _, _, Args) ->
+ mnesia:abort({no_exists, Args});
+do_dirty_rpc(Tab, Node, M, F, Args) ->
+ case rpc:call(Node, M, F, Args) of
+ {badrpc,{'EXIT', {undef, [{ M, F, _} | _]}}}
+ when M == ?MODULE, F == remote_dirty_select ->
+ %% Oops, the other node has not been upgraded
+	    %% to 4.0.3 yet. Let's do it the old way.
+ %% Remove this in next release.
+ do_dirty_rpc(Tab, Node, mnesia_lib, db_select, Args);
+ {badrpc, Reason} ->
+ erlang:yield(), %% Do not be too eager
+ case mnesia_controller:call({check_w2r, Node, Tab}) of % Sync
+ NewNode when NewNode == Node ->
+ ErrorTag = mnesia_lib:dirty_rpc_error_tag(Reason),
+ mnesia:abort({ErrorTag, Args});
+ NewNode ->
+ case get(mnesia_activity_state) of
+ {_Mod, Tid, _Ts} when record(Tid, tid) ->
+ %% In order to perform a consistent
+ %% retry of a transaction we need
+ %% to acquire the lock on the NewNode.
+		     %% In this context we know neither
+		     %% the kind nor the granularity of the lock.
+ %% --> Abort the transaction
+ mnesia:abort({node_not_running, Node});
+ _ ->
+ %% Splendid! A dirty retry is safe
+ %% 'Node' probably went down now
+ %% Let mnesia_controller get broken link message first
+ do_dirty_rpc(Tab, NewNode, M, F, Args)
+ end
+ end;
+ Other ->
+ Other
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Info
+
+%% Info about one table
+table_info(Tab, Item) ->
+ case get(mnesia_activity_state) of
+ undefined ->
+ any_table_info(Tab, Item);
+ {?DEFAULT_ACCESS, _Tid, _Ts} ->
+ any_table_info(Tab, Item);
+ {Mod, Tid, Ts} ->
+ Mod:table_info(Tid, Ts, Tab, Item);
+ _ ->
+ abort(no_transaction)
+ end.
+
+table_info(_Tid, _Ts, Tab, Item) ->
+ any_table_info(Tab, Item).
+
+
+any_table_info(Tab, Item) when atom(Tab) ->
+ case Item of
+ master_nodes ->
+ mnesia_recover:get_master_nodes(Tab);
+% checkpoints ->
+% case ?catch_val({Tab, commit_work}) of
+% [{checkpoints, List} | _] -> List;
+% No_chk when list(No_chk) -> [];
+% Else -> info_reply(Else, Tab, Item)
+% end;
+ size ->
+ raw_table_info(Tab, Item);
+ memory ->
+ raw_table_info(Tab, Item);
+ type ->
+ case ?catch_val({Tab, setorbag}) of
+ {'EXIT', _} ->
+ bad_info_reply(Tab, Item);
+ Val ->
+ Val
+ end;
+ all ->
+ case mnesia_schema:get_table_properties(Tab) of
+ [] ->
+ abort({no_exists, Tab, Item});
+ Props ->
+ lists:map(fun({setorbag, Type}) -> {type, Type};
+ (Prop) -> Prop end,
+ Props)
+ end;
+ _ ->
+ case ?catch_val({Tab, Item}) of
+ {'EXIT', _} ->
+ bad_info_reply(Tab, Item);
+ Val ->
+ Val
+ end
+ end;
+any_table_info(Tab, _Item) ->
+ abort({bad_type, Tab}).
+
+raw_table_info(Tab, Item) ->
+ case ?catch_val({Tab, storage_type}) of
+ ram_copies ->
+ info_reply(catch ?ets_info(Tab, Item), Tab, Item);
+ disc_copies ->
+ info_reply(catch ?ets_info(Tab, Item), Tab, Item);
+ disc_only_copies ->
+ info_reply(catch dets:info(Tab, Item), Tab, Item);
+ unknown ->
+ bad_info_reply(Tab, Item);
+ {'EXIT', _} ->
+ bad_info_reply(Tab, Item)
+ end.
+
+info_reply({'EXIT', _Reason}, Tab, Item) ->
+ bad_info_reply(Tab, Item);
+info_reply({error, _Reason}, Tab, Item) ->
+ bad_info_reply(Tab, Item);
+info_reply(Val, _Tab, _Item) ->
+ Val.
+
+bad_info_reply(_Tab, size) -> 0;
+bad_info_reply(_Tab, memory) -> 0;
+bad_info_reply(Tab, Item) -> abort({no_exists, Tab, Item}).
+
+%% Raw info about all tables
+schema() ->
+ mnesia_schema:info().
+
+%% Raw info about one table
+schema(Tab) ->
+ mnesia_schema:info(Tab).
+
+error_description(Err) ->
+ mnesia_lib:error_desc(Err).
+
+info() ->
+ case mnesia_lib:is_running() of
+ yes ->
+ TmInfo = mnesia_tm:get_info(10000),
+ Held = system_info(held_locks),
+ Queued = system_info(lock_queue),
+
+ io:format("---> Processes holding locks <--- ~n", []),
+ lists:foreach(fun(L) -> io:format("Lock: ~p~n", [L]) end,
+ Held),
+
+ io:format( "---> Processes waiting for locks <--- ~n", []),
+ lists:foreach(fun({Oid, Op, _Pid, Tid, OwnerTid}) ->
+ io:format("Tid ~p waits for ~p lock "
+ "on oid ~p owned by ~p ~n",
+ [Tid, Op, Oid, OwnerTid])
+ end, Queued),
+ mnesia_tm:display_info(group_leader(), TmInfo),
+
+ Pat = {'_', unclear, '_'},
+ Uncertain = ets:match_object(mnesia_decision, Pat),
+
+ io:format( "---> Uncertain transactions <--- ~n", []),
+ lists:foreach(fun({Tid, _, Nodes}) ->
+ io:format("Tid ~w waits for decision "
+ "from ~w~n",
+ [Tid, Nodes])
+ end, Uncertain),
+
+ mnesia_controller:info(),
+ display_system_info(Held, Queued, TmInfo, Uncertain);
+ _ ->
+ mini_info()
+ end,
+ ok.
+
+mini_info() ->
+ io:format("===> System info in version ~p, debug level = ~p <===~n",
+ [system_info(version), system_info(debug)]),
+ Not =
+ case system_info(use_dir) of
+ true -> "";
+ false -> "NOT "
+ end,
+
+ io:format("~w. Directory ~p is ~sused.~n",
+ [system_info(schema_location), system_info(directory), Not]),
+ io:format("use fallback at restart = ~w~n",
+ [system_info(fallback_activated)]),
+ Running = system_info(running_db_nodes),
+ io:format("running db nodes = ~w~n", [Running]),
+ All = mnesia_lib:all_nodes(),
+ io:format("stopped db nodes = ~w ~n", [All -- Running]).
+
+display_system_info(Held, Queued, TmInfo, Uncertain) ->
+ mini_info(),
+ display_tab_info(),
+ S = fun(Items) -> [system_info(I) || I <- Items] end,
+
+ io:format("~w transactions committed, ~w aborted, "
+ "~w restarted, ~w logged to disc~n",
+ S([transaction_commits, transaction_failures,
+ transaction_restarts, transaction_log_writes])),
+
+ {Active, Pending} =
+ case TmInfo of
+ {timeout, _} -> {infinity, infinity};
+ {info, P, A} -> {length(A), length(P)}
+ end,
+ io:format("~w held locks, ~w in queue; "
+ "~w local transactions, ~w remote~n",
+ [length(Held), length(Queued), Active, Pending]),
+
+ Ufold = fun({_, _, Ns}, {C, Old}) ->
+ New = [N || N <- Ns, not lists:member(N, Old)],
+ {C + 1, New ++ Old}
+ end,
+ {Ucount, Unodes} = lists:foldl(Ufold, {0, []}, Uncertain),
+    io:format("~w transactions wait for other nodes: ~p~n",
+ [Ucount, Unodes]).
+
+display_tab_info() ->
+ MasterTabs = mnesia_recover:get_master_node_tables(),
+ io:format("master node tables = ~p~n", [lists:sort(MasterTabs)]),
+
+ Tabs = system_info(tables),
+
+ {Unknown, Ram, Disc, DiscOnly} =
+ lists:foldl(fun storage_count/2, {[], [], [], []}, Tabs),
+
+ io:format("remote = ~p~n", [lists:sort(Unknown)]),
+ io:format("ram_copies = ~p~n", [lists:sort(Ram)]),
+ io:format("disc_copies = ~p~n", [lists:sort(Disc)]),
+ io:format("disc_only_copies = ~p~n", [lists:sort(DiscOnly)]),
+
+ Rfoldl = fun(T, Acc) ->
+ Rpat =
+ case val({T, access_mode}) of
+ read_only ->
+ lists:sort([{A, read_only} || A <- val({T, active_replicas})]);
+ read_write ->
+ table_info(T, where_to_commit)
+ end,
+ case lists:keysearch(Rpat, 1, Acc) of
+ {value, {_Rpat, Rtabs}} ->
+ lists:keyreplace(Rpat, 1, Acc, {Rpat, [T | Rtabs]});
+ false ->
+ [{Rpat, [T]} | Acc]
+ end
+ end,
+ Repl = lists:foldl(Rfoldl, [], Tabs),
+ Rdisp = fun({Rpat, Rtabs}) -> io:format("~p = ~p~n", [Rpat, Rtabs]) end,
+ lists:foreach(Rdisp, lists:sort(Repl)).
+
+storage_count(T, {U, R, D, DO}) ->
+ case table_info(T, storage_type) of
+ unknown -> {[T | U], R, D, DO};
+ ram_copies -> {U, [T | R], D, DO};
+ disc_copies -> {U, R, [T | D], DO};
+ disc_only_copies -> {U, R, D, [T | DO]}
+ end.
+
+system_info(Item) ->
+ case catch system_info2(Item) of
+ {'EXIT',Error} -> abort(Error);
+ Other -> Other
+ end.
+
+system_info2(all) ->
+ Items = system_info_items(mnesia_lib:is_running()),
+ [{I, system_info(I)} || I <- Items];
+
+system_info2(db_nodes) ->
+ DiscNs = ?catch_val({schema, disc_copies}),
+ RamNs = ?catch_val({schema, ram_copies}),
+ if
+ list(DiscNs), list(RamNs) ->
+ DiscNs ++ RamNs;
+ true ->
+ case mnesia_schema:read_nodes() of
+ {ok, Nodes} -> Nodes;
+ {error,Reason} -> exit(Reason)
+ end
+ end;
+system_info2(running_db_nodes) ->
+ case ?catch_val({current, db_nodes}) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_lib:running_nodes();
+ Other ->
+ Other
+ end;
+
+system_info2(extra_db_nodes) ->
+ case ?catch_val(extra_db_nodes) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:get_env(extra_db_nodes);
+ Other ->
+ Other
+ end;
+
+system_info2(directory) ->
+ case ?catch_val(directory) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:get_env(dir);
+ Other ->
+ Other
+ end;
+
+system_info2(use_dir) ->
+ case ?catch_val(use_dir) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:use_dir();
+ Other ->
+ Other
+ end;
+
+system_info2(schema_location) ->
+ case ?catch_val(schema_location) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:get_env(schema_location);
+ Other ->
+ Other
+ end;
+
+system_info2(fallback_activated) ->
+ case ?catch_val(fallback_activated) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_bup:fallback_exists();
+ Other ->
+ Other
+ end;
+
+system_info2(version) ->
+ case ?catch_val(version) of
+ {'EXIT', _} ->
+ Apps = application:loaded_applications(),
+ case lists:keysearch(?APPLICATION, 1, Apps) of
+ {value, {_Name, _Desc, Version}} ->
+ Version;
+ false ->
+ %% Ensure that it does not match
+ {mnesia_not_loaded, node(), now()}
+ end;
+ Version ->
+ Version
+ end;
+
+system_info2(access_module) -> mnesia_monitor:get_env(access_module);
+system_info2(auto_repair) -> mnesia_monitor:get_env(auto_repair);
+system_info2(is_running) -> mnesia_lib:is_running();
+system_info2(backup_module) -> mnesia_monitor:get_env(backup_module);
+system_info2(event_module) -> mnesia_monitor:get_env(event_module);
+system_info2(debug) -> mnesia_monitor:get_env(debug);
+system_info2(dump_log_load_regulation) -> mnesia_monitor:get_env(dump_log_load_regulation);
+system_info2(dump_log_write_threshold) -> mnesia_monitor:get_env(dump_log_write_threshold);
+system_info2(dump_log_time_threshold) -> mnesia_monitor:get_env(dump_log_time_threshold);
+system_info2(dump_log_update_in_place) ->
+    mnesia_monitor:get_env(dump_log_update_in_place);
+system_info2(max_wait_for_decision) -> mnesia_monitor:get_env(max_wait_for_decision);
+system_info2(embedded_mnemosyne) -> mnesia_monitor:get_env(embedded_mnemosyne);
+system_info2(ignore_fallback_at_startup) -> mnesia_monitor:get_env(ignore_fallback_at_startup);
+system_info2(fallback_error_function) -> mnesia_monitor:get_env(fallback_error_function);
+system_info2(log_version) -> mnesia_log:version();
+system_info2(protocol_version) -> mnesia_monitor:protocol_version();
+system_info2(schema_version) -> mnesia_schema:version(); %backward compatibility
+system_info2(tables) -> val({schema, tables});
+system_info2(local_tables) -> val({schema, local_tables});
+system_info2(master_node_tables) -> mnesia_recover:get_master_node_tables();
+system_info2(subscribers) -> mnesia_subscr:subscribers();
+system_info2(checkpoints) -> mnesia_checkpoint:checkpoints();
+system_info2(held_locks) -> mnesia_locker:get_held_locks();
+system_info2(lock_queue) -> mnesia_locker:get_lock_queue();
+system_info2(transactions) -> mnesia_tm:get_transactions();
+system_info2(transaction_failures) -> mnesia_lib:read_counter(trans_failures);
+system_info2(transaction_commits) -> mnesia_lib:read_counter(trans_commits);
+system_info2(transaction_restarts) -> mnesia_lib:read_counter(trans_restarts);
+system_info2(transaction_log_writes) -> mnesia_dumper:get_log_writes();
+
+system_info2(Item) -> exit({badarg, Item}).
+
+system_info_items(yes) ->
+ [
+ access_module,
+ auto_repair,
+ backup_module,
+ checkpoints,
+ db_nodes,
+ debug,
+ directory,
+ dump_log_load_regulation,
+ dump_log_time_threshold,
+ dump_log_update_in_place,
+ dump_log_write_threshold,
+ embedded_mnemosyne,
+ event_module,
+ extra_db_nodes,
+ fallback_activated,
+ held_locks,
+ ignore_fallback_at_startup,
+ fallback_error_function,
+ is_running,
+ local_tables,
+ lock_queue,
+ log_version,
+ master_node_tables,
+ max_wait_for_decision,
+ protocol_version,
+ running_db_nodes,
+ schema_location,
+ schema_version,
+ subscribers,
+ tables,
+ transaction_commits,
+ transaction_failures,
+ transaction_log_writes,
+ transaction_restarts,
+ transactions,
+ use_dir,
+ version
+ ];
+system_info_items(no) ->
+ [
+ auto_repair,
+ backup_module,
+ db_nodes,
+ debug,
+ directory,
+ dump_log_load_regulation,
+ dump_log_time_threshold,
+ dump_log_update_in_place,
+ dump_log_write_threshold,
+ event_module,
+ extra_db_nodes,
+ ignore_fallback_at_startup,
+ fallback_error_function,
+ is_running,
+ log_version,
+ max_wait_for_decision,
+ protocol_version,
+ running_db_nodes,
+ schema_location,
+ schema_version,
+ use_dir,
+ version
+ ].
+
+system_info() ->
+ IsRunning = mnesia_lib:is_running(),
+ case IsRunning of
+ yes ->
+ TmInfo = mnesia_tm:get_info(10000),
+ Held = system_info(held_locks),
+ Queued = system_info(lock_queue),
+ Pat = {'_', unclear, '_'},
+ Uncertain = ets:match_object(mnesia_decision, Pat),
+ display_system_info(Held, Queued, TmInfo, Uncertain);
+ _ ->
+ mini_info()
+ end,
+ IsRunning.
+
+load_mnesia_or_abort() ->
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ abort(Reason)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Database mgt
+
+create_schema(Ns) ->
+ mnesia_bup:create_schema(Ns).
+
+delete_schema(Ns) ->
+ mnesia_schema:delete_schema(Ns).
+
+backup(Opaque) ->
+ mnesia_log:backup(Opaque).
+
+backup(Opaque, Mod) ->
+ mnesia_log:backup(Opaque, Mod).
+
+traverse_backup(S, T, Fun, Acc) ->
+ mnesia_bup:traverse_backup(S, T, Fun, Acc).
+
+traverse_backup(S, SM, T, TM, F, A) ->
+ mnesia_bup:traverse_backup(S, SM, T, TM, F, A).
+
+install_fallback(Opaque) ->
+ mnesia_bup:install_fallback(Opaque).
+
+install_fallback(Opaque, Mod) ->
+ mnesia_bup:install_fallback(Opaque, Mod).
+
+uninstall_fallback() ->
+ mnesia_bup:uninstall_fallback().
+
+uninstall_fallback(Args) ->
+ mnesia_bup:uninstall_fallback(Args).
+
+activate_checkpoint(Args) ->
+ mnesia_checkpoint:activate(Args).
+
+deactivate_checkpoint(Name) ->
+ mnesia_checkpoint:deactivate(Name).
+
+backup_checkpoint(Name, Opaque) ->
+ mnesia_log:backup_checkpoint(Name, Opaque).
+
+backup_checkpoint(Name, Opaque, Mod) ->
+ mnesia_log:backup_checkpoint(Name, Opaque, Mod).
+
+restore(Opaque, Args) ->
+ mnesia_schema:restore(Opaque, Args).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt
+
+create_table(Arg) ->
+ mnesia_schema:create_table(Arg).
+create_table(Name, Arg) when list(Arg) ->
+ mnesia_schema:create_table([{name, Name}| Arg]);
+create_table(Name, Arg) ->
+ {aborted, badarg, Name, Arg}.
+
+delete_table(Tab) ->
+ mnesia_schema:delete_table(Tab).
+
+add_table_copy(Tab, N, S) ->
+ mnesia_schema:add_table_copy(Tab, N, S).
+del_table_copy(Tab, N) ->
+ mnesia_schema:del_table_copy(Tab, N).
+
+move_table_copy(Tab, From, To) ->
+ mnesia_schema:move_table(Tab, From, To).
+
+add_table_index(Tab, Ix) ->
+ mnesia_schema:add_table_index(Tab, Ix).
+del_table_index(Tab, Ix) ->
+ mnesia_schema:del_table_index(Tab, Ix).
+
+transform_table(Tab, Fun, NewA) ->
+ case catch val({Tab, record_name}) of
+ {'EXIT', Reason} ->
+ mnesia:abort(Reason);
+ OldRN ->
+ mnesia_schema:transform_table(Tab, Fun, NewA, OldRN)
+ end.
+
+transform_table(Tab, Fun, NewA, NewRN) ->
+ mnesia_schema:transform_table(Tab, Fun, NewA, NewRN).
+
+change_table_copy_type(T, N, S) ->
+ mnesia_schema:change_table_copy_type(T, N, S).
+
+clear_table(Tab) ->
+ mnesia_schema:clear_table(Tab).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt - user properties
+
+read_table_property(Tab, PropKey) ->
+ val({Tab, user_property, PropKey}).
+
+write_table_property(Tab, Prop) ->
+ mnesia_schema:write_table_property(Tab, Prop).
+
+delete_table_property(Tab, PropKey) ->
+ mnesia_schema:delete_table_property(Tab, PropKey).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt - table fragmentation
+
+change_table_frag(Tab, FragProp) ->
+ mnesia_schema:change_table_frag(Tab, FragProp).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt - table load
+
+%% Dump a ram table to disc
+dump_tables(Tabs) ->
+ mnesia_schema:dump_tables(Tabs).
+
+%% allow the user to wait for some tables to be loaded
+wait_for_tables(Tabs, Timeout) ->
+ mnesia_controller:wait_for_tables(Tabs, Timeout).
+
+force_load_table(Tab) ->
+ case mnesia_controller:force_load_table(Tab) of
+ ok -> yes; % Backwards compatibility
+ Other -> Other
+ end.
+
+change_table_access_mode(T, Access) ->
+ mnesia_schema:change_table_access_mode(T, Access).
+
+change_table_load_order(T, O) ->
+ mnesia_schema:change_table_load_order(T, O).
+
+set_master_nodes(Nodes) when list(Nodes) ->
+ UseDir = system_info(use_dir),
+ IsRunning = system_info(is_running),
+ case IsRunning of
+ yes ->
+ CsPat = {{'_', cstruct}, '_'},
+ Cstructs0 = ?ets_match_object(mnesia_gvar, CsPat),
+ Cstructs = [Cs || {_, Cs} <- Cstructs0],
+ log_valid_master_nodes(Cstructs, Nodes, UseDir, IsRunning);
+ _NotRunning ->
+ case UseDir of
+ true ->
+ mnesia_lib:lock_table(schema),
+ Res =
+ case mnesia_schema:read_cstructs_from_disc() of
+ {ok, Cstructs} ->
+ log_valid_master_nodes(Cstructs, Nodes, UseDir, IsRunning);
+ {error, Reason} ->
+ {error, Reason}
+ end,
+ mnesia_lib:unlock_table(schema),
+ Res;
+ false ->
+ ok
+ end
+ end;
+set_master_nodes(Nodes) ->
+ {error, {bad_type, Nodes}}.
+
+log_valid_master_nodes(Cstructs, Nodes, UseDir, IsRunning) ->
+ Fun = fun(Cs) ->
+ Copies = mnesia_lib:copy_holders(Cs),
+ Valid = mnesia_lib:intersect(Nodes, Copies),
+ {Cs#cstruct.name, Valid}
+ end,
+ Args = lists:map(Fun, Cstructs),
+ mnesia_recover:log_master_nodes(Args, UseDir, IsRunning).
+
+set_master_nodes(Tab, Nodes) when list(Nodes) ->
+ UseDir = system_info(use_dir),
+ IsRunning = system_info(is_running),
+ case IsRunning of
+ yes ->
+ case ?catch_val({Tab, cstruct}) of
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ Cs ->
+ case Nodes -- mnesia_lib:copy_holders(Cs) of
+ [] ->
+ Args = [{Tab , Nodes}],
+ mnesia_recover:log_master_nodes(Args, UseDir, IsRunning);
+ BadNodes ->
+ {error, {no_exists, Tab, BadNodes}}
+ end
+ end;
+ _NotRunning ->
+ case UseDir of
+ true ->
+ mnesia_lib:lock_table(schema),
+ Res =
+ case mnesia_schema:read_cstructs_from_disc() of
+ {ok, Cstructs} ->
+ case lists:keysearch(Tab, 2, Cstructs) of
+ {value, Cs} ->
+ case Nodes -- mnesia_lib:copy_holders(Cs) of
+ [] ->
+ Args = [{Tab , Nodes}],
+ mnesia_recover:log_master_nodes(Args, UseDir, IsRunning);
+ BadNodes ->
+ {error, {no_exists, Tab, BadNodes}}
+ end;
+ false ->
+ {error, {no_exists, Tab}}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end,
+ mnesia_lib:unlock_table(schema),
+ Res;
+ false ->
+ ok
+ end
+ end;
+set_master_nodes(Tab, Nodes) ->
+ {error, {bad_type, Tab, Nodes}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Misc admin
+
+dump_log() ->
+ mnesia_controller:sync_dump_log(user).
+
+subscribe(What) ->
+ mnesia_subscr:subscribe(self(), What).
+
+unsubscribe(What) ->
+ mnesia_subscr:unsubscribe(self(), What).
+
+report_event(Event) ->
+ mnesia_lib:report_system_event({mnesia_user, Event}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Snmp
+
+snmp_open_table(Tab, Us) ->
+ mnesia_schema:add_snmp(Tab, Us).
+
+snmp_close_table(Tab) ->
+ mnesia_schema:del_snmp(Tab).
+
+snmp_get_row(Tab, RowIndex) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_snmp_hook, get_row, [Tab, RowIndex]);
+snmp_get_row(Tab, _RowIndex) ->
+ abort({bad_type, Tab}).
+
+snmp_get_next_index(Tab, RowIndex) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_snmp_hook, get_next_index, [Tab, RowIndex]);
+snmp_get_next_index(Tab, _RowIndex) ->
+ abort({bad_type, Tab}).
+
+snmp_get_mnesia_key(Tab, RowIndex) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_snmp_hook, get_mnesia_key, [Tab, RowIndex]);
+snmp_get_mnesia_key(Tab, _RowIndex) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Textfile access
+
+load_textfile(F) ->
+ mnesia_text:load_textfile(F).
+dump_to_textfile(F) ->
+ mnesia_text:dump_to_textfile(F).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Mnemosyne exclusive
+
+get_activity_id() ->
+ get(mnesia_activity_state).
+
+put_activity_id(Activity) ->
+ mnesia_tm:put_activity_id(Activity).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.hrl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.hrl
new file mode 100644
index 0000000000..b9715ad927
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia.hrl
@@ -0,0 +1,118 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia.hrl,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+%%
+
+-define(APPLICATION, mnesia).
+
+-define(ets_lookup(Tab, Key), ets:lookup(Tab, Key)).
+-define(ets_lookup_element(Tab, Key, Pos), ets:lookup_element(Tab, Key, Pos)).
+-define(ets_insert(Tab, Rec), ets:insert(Tab, Rec)).
+-define(ets_delete(Tab, Key), ets:delete(Tab, Key)).
+-define(ets_match_delete(Tab, Pat), ets:match_delete(Tab, Pat)).
+-define(ets_match_object(Tab, Pat), ets:match_object(Tab, Pat)).
+-define(ets_match(Tab, Pat), ets:match(Tab, Pat)).
+-define(ets_info(Tab, Item), ets:info(Tab, Item)).
+-define(ets_update_counter(Tab, Key, Incr), ets:update_counter(Tab, Key, Incr)).
+-define(ets_first(Tab), ets:first(Tab)).
+-define(ets_next(Tab, Key), ets:next(Tab, Key)).
+-define(ets_last(Tab), ets:last(Tab)).
+-define(ets_prev(Tab, Key), ets:prev(Tab, Key)).
+-define(ets_slot(Tab, Pos), ets:slot(Tab, Pos)).
+-define(ets_new_table(Tab, Props), ets:new(Tab, Props)).
+-define(ets_delete_table(Tab), ets:delete(Tab)).
+-define(ets_fixtable(Tab, Bool), ets:fixtable(Tab, Bool)).
+
+-define(catch_val(Var), (catch ?ets_lookup_element(mnesia_gvar, Var, 2))).
+
+%% It's important that counter is first, since we compare tid's
+
+-record(tid,
+ {counter, %% serial no for tid
+ pid}). %% owner of tid
+
+
+-record(tidstore,
+ {store, %% current ets table for tid
+ up_stores = [], %% list of upper layer stores for nested trans
+ level = 1}). %% transaction level
+
+-define(unique_cookie, {erlang:now(), node()}).
+
+-record(cstruct, {name, % Atom
+ type = set, % set | bag
+ ram_copies = [], % [Node]
+ disc_copies = [], % [Node]
+ disc_only_copies = [], % [Node]
+ load_order = 0, % Integer
+ access_mode = read_write, % read_write | read_only
+ index = [], % [Integer]
+ snmp = [], % Snmp Ustruct
+ local_content = false, % true | false
+ record_name = {bad_record_name}, % Atom (Default = Name)
+ attributes = [key, val], % [Atom]
+ user_properties = [], % [Record]
+		  frag_properties = [],    % [{Key, Val}]
+ cookie = ?unique_cookie, % Term
+ version = {{2, 0}, []}}). % {{Integer, Integer}, [Node]}
+
+%% Record for the head structure in Mnesia's log files
+%%
+%% The definition of this record may *NEVER* be changed
+%% since it may be written to very old backup files.
+%% By holding this record definition stable we remain
+%% able to comprehend backups from timepoint 0. It also
+%% allows us to use the backup format as an interchange
+%% format between Mnesia releases.
+
+-record(log_header,{log_kind,
+ log_version,
+ mnesia_version,
+ node,
+ now}).
+
+%% Commit records stored in the transaction log
+-record(commit, {node,
+ decision, % presume_commit | Decision
+ ram_copies = [],
+ disc_copies = [],
+ disc_only_copies = [],
+ snmp = [],
+ schema_ops = []
+ }).
+
+-record(decision, {tid,
+ outcome, % presume_abort | committed
+ disc_nodes,
+ ram_nodes}).
+
+%% Maybe cyclic wait
+-record(cyclic, {node = node(),
+ oid, % {Tab, Key}
+ op, % read | write
+ lock, % read | write
+ lucky
+ }).
+
+%% Managing conditional debug functions
+
+-ifdef(debug).
+ -define(eval_debug_fun(I, C),
+ mnesia_lib:eval_debug_fun(I, C, ?FILE, ?LINE)).
+-else.
+ -define(eval_debug_fun(I, C), ok).
+-endif.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_backup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_backup.erl
new file mode 100644
index 0000000000..a1fbb21d94
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_backup.erl
@@ -0,0 +1,195 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_backup.erl,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+%%
+%0
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% This module contains one implementation of callback functions
+%% used by Mnesia at backup and restore. The user may, however,
+%% write their own module with the same interface as mnesia_backup and
+%% configure Mnesia so that the alternate module performs the actual
+%% accesses to the backup media. This means that the user may put
+%% the backup on media that Mnesia does not know about, possibly
+%% on hosts where Erlang is not running.
+%%
+%% The OpaqueData argument is never interpreted by other parts of
+%% Mnesia. It is the property of this module. Alternate implementations
+%% of this module may have different interpretations of OpaqueData.
+%% The OpaqueData argument given to open_write/1 and open_read/1
+%% are forwarded directly from the user.
+%%
+%% All functions must return {ok, NewOpaqueData} or {error, Reason}.
+%%
+%% The NewOpaqueData arguments returned by backup callback functions will
+%% be given as input when the next backup callback function is invoked.
+%% If any return value does not match {ok, _} the backup will be aborted.
+%%
+%% The NewOpaqueData arguments returned by restore callback functions will
+%% be given as input when the next restore callback function is invoked
+%% If any return value does not match {ok, _} the restore will be aborted.
+%%
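+%% As a hedged sketch, assuming a hypothetical module my_tape_backup that
+%% exports the same seven callbacks as this module, the alternate
+%% implementation could be selected per call:
+%%
+%%   mnesia:backup(BackupOpaque, my_tape_backup),
+%%   mnesia:install_fallback(BackupOpaque, [{module, my_tape_backup}])
+%%
+%% or made the default via the backup_module application parameter,
+%% which is what mnesia_monitor:get_env(backup_module) returns.
+%%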
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-module(mnesia_backup).
+-behaviour(mnesia_backup).
+
+-include_lib("kernel/include/file.hrl").
+
+-export([
+ %% Write access
+ open_write/1,
+ write/2,
+ commit_write/1,
+ abort_write/1,
+
+ %% Read access
+ open_read/1,
+ read/1,
+ close_read/1
+ ]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup callback interface
+-record(backup, {tmp_file, file, file_desc}).
+
+%% Opens backup media for write
+%%
+%% Returns {ok, OpaqueData} or {error, Reason}
+open_write(OpaqueData) ->
+ File = OpaqueData,
+ Tmp = lists:concat([File,".BUPTMP"]),
+ file:delete(Tmp),
+ file:delete(File),
+ case disk_log:open([{name, make_ref()},
+ {file, Tmp},
+ {repair, false},
+ {linkto, self()}]) of
+ {ok, Fd} ->
+ {ok, #backup{tmp_file = Tmp, file = File, file_desc = Fd}};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%% Writes BackupItems to the backup media
+%%
+%% Returns {ok, OpaqueData} or {error, Reason}
+write(OpaqueData, BackupItems) ->
+ B = OpaqueData,
+ case disk_log:log_terms(B#backup.file_desc, BackupItems) of
+ ok ->
+ {ok, B};
+ {error, Reason} ->
+ abort_write(B),
+ {error, Reason}
+ end.
+
+%% Closes the backup media after a successful backup
+%%
+%% Returns {ok, ReturnValueToUser} or {error, Reason}
+commit_write(OpaqueData) ->
+ B = OpaqueData,
+ case disk_log:sync(B#backup.file_desc) of
+ ok ->
+ case disk_log:close(B#backup.file_desc) of
+ ok ->
+ case file:rename(B#backup.tmp_file, B#backup.file) of
+ ok ->
+ {ok, B#backup.file};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%% Closes the backup media after an interrupted backup
+%%
+%% Returns {ok, ReturnValueToUser} or {error, Reason}
+abort_write(BackupRef) ->
+ Res = disk_log:close(BackupRef#backup.file_desc),
+ file:delete(BackupRef#backup.tmp_file),
+ case Res of
+ ok ->
+ {ok, BackupRef#backup.file};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restore callback interface
+
+-record(restore, {file, file_desc, cont}).
+
+%% Opens backup media for read
+%%
+%% Returns {ok, OpaqueData} or {error, Reason}
+open_read(OpaqueData) ->
+ File = OpaqueData,
+ case file:read_file_info(File) of
+ {error, Reason} ->
+ {error, Reason};
+ _FileInfo -> %% file exists
+ case disk_log:open([{file, File},
+ {name, make_ref()},
+ {repair, false},
+ {mode, read_only},
+ {linkto, self()}]) of
+ {ok, Fd} ->
+ {ok, #restore{file = File, file_desc = Fd, cont = start}};
+ {repaired, Fd, _, {badbytes, 0}} ->
+ {ok, #restore{file = File, file_desc = Fd, cont = start}};
+ {repaired, Fd, _, _} ->
+ {ok, #restore{file = File, file_desc = Fd, cont = start}};
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end.
+
+%% Reads BackupItems from the backup media
+%%
+%% Returns {ok, OpaqueData, BackupItems} or {error, Reason}
+%%
+%% BackupItems == [] is interpreted as eof
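+%%
+%% A hedged sketch of a reader loop that honours this convention, where
+%% Mod is any module implementing this read interface:
+%%
+%%   read_all(Mod, Opaque0, Acc) ->
+%%       case Mod:read(Opaque0) of
+%%           {ok, Opaque, []}    -> {ok, Opaque, Acc};        % eof reached
+%%           {ok, Opaque, Items} -> read_all(Mod, Opaque, Acc ++ Items);
+%%           {error, Reason}     -> {error, Reason}
+%%       end.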
+read(OpaqueData) ->
+ R = OpaqueData,
+ Fd = R#restore.file_desc,
+ case disk_log:chunk(Fd, R#restore.cont) of
+ {error, Reason} ->
+ {error, {"Possibly truncated", Reason}};
+ eof ->
+ {ok, R, []};
+ {Cont, []} ->
+ read(R#restore{cont = Cont});
+ {Cont, BackupItems} ->
+ {ok, R#restore{cont = Cont}, BackupItems}
+ end.
+
+%% Closes the backup media after restore
+%%
+%% Returns {ok, ReturnValueToUser} or {error, Reason}
+close_read(OpaqueData) ->
+ R = OpaqueData,
+ case disk_log:close(R#restore.file_desc) of
+ ok -> {ok, R#restore.file};
+ {error, Reason} -> {error, Reason}
+ end.
+%0
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_bup.erl
new file mode 100644
index 0000000000..f03dc029cc
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_bup.erl
@@ -0,0 +1,1169 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_bup.erl,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+%%
+-module(mnesia_bup).
+-export([
+ %% Public interface
+ iterate/4,
+ read_schema/2,
+ fallback_bup/0,
+ fallback_exists/0,
+ tm_fallback_start/1,
+ create_schema/1,
+ install_fallback/1,
+ install_fallback/2,
+ uninstall_fallback/0,
+ uninstall_fallback/1,
+ traverse_backup/4,
+ traverse_backup/6,
+ make_initial_backup/3,
+ fallback_to_schema/0,
+ lookup_schema/2,
+ schema2bup/1,
+ refresh_cookie/2,
+
+ %% Internal
+ fallback_receiver/2,
+ install_fallback_master/2,
+ uninstall_fallback_master/2,
+ local_uninstall_fallback/2,
+ do_traverse_backup/7,
+ trav_apply/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [verbose/2, dbg_out/2]).
+
+-record(restore, {mode, bup_module, bup_data}).
+
+-record(fallback_args, {opaque,
+ scope = global,
+ module = mnesia_monitor:get_env(backup_module),
+ use_default_dir = true,
+ mnesia_dir,
+ fallback_bup,
+ fallback_tmp,
+ skip_tables = [],
+ keep_tables = [],
+ default_op = keep_tables
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup iterator
+
+%% Reads schema section and iterates over all records in a backup.
+%%
+%% Fun(BunchOfRecords, Header, Schema, Acc) is applied when a suitable number
+%% of records has been collected.
+%%
+%% BunchOfRecords will be [] when the iteration is done.
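+%%
+%% A hedged sketch of such a Fun, counting all records in a backup
+%% (mnesia_backup and "FALLBACK.BUP" are merely example arguments):
+%%
+%%   Count = fun(Recs, _Header, _Schema, Acc) -> Acc + length(Recs) end,
+%%   mnesia_bup:iterate(mnesia_backup, Count, "FALLBACK.BUP", 0)
+%%
+%% which returns {ok, TotalNumberOfRecords} or {error, Reason}.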
+iterate(Mod, Fun, Opaque, Acc) ->
+ R = #restore{bup_module = Mod, bup_data = Opaque},
+ case catch read_schema_section(R) of
+ {error, Reason} ->
+ {error, Reason};
+ {R2, {Header, Schema, Rest}} ->
+ case catch iter(R2, Header, Schema, Fun, Acc, Rest) of
+ {ok, R3, Res} ->
+ catch safe_apply(R3, close_read, [R3#restore.bup_data]),
+ {ok, Res};
+ {error, Reason} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ {error, Reason};
+ {'EXIT', Pid, Reason} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ {error, {'EXIT', Pid, Reason}};
+ {'EXIT', Reason} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ {error, {'EXIT', Reason}}
+ end
+ end.
+
+iter(R, Header, Schema, Fun, Acc, []) ->
+ case safe_apply(R, read, [R#restore.bup_data]) of
+ {R2, []} ->
+ Res = Fun([], Header, Schema, Acc),
+ {ok, R2, Res};
+ {R2, BupItems} ->
+ iter(R2, Header, Schema, Fun, Acc, BupItems)
+ end;
+iter(R, Header, Schema, Fun, Acc, BupItems) ->
+ Acc2 = Fun(BupItems, Header, Schema, Acc),
+ iter(R, Header, Schema, Fun, Acc2, []).
+
+safe_apply(R, write, [_, Items]) when Items == [] ->
+ R;
+safe_apply(R, What, Args) ->
+ Abort = fun(Re) -> abort_restore(R, What, Args, Re) end,
+ receive
+ {'EXIT', Pid, Re} -> Abort({'EXIT', Pid, Re})
+ after 0 ->
+ Mod = R#restore.bup_module,
+ case catch apply(Mod, What, Args) of
+ {ok, Opaque, Items} when What == read ->
+ {R#restore{bup_data = Opaque}, Items};
+	    {ok, Opaque} when What /= read ->
+ R#restore{bup_data = Opaque};
+ {error, Re} ->
+ Abort(Re);
+ Re ->
+ Abort(Re)
+ end
+ end.
+
+abort_restore(R, What, Args, Reason) ->
+ Mod = R#restore.bup_module,
+ Opaque = R#restore.bup_data,
+ dbg_out("Restore aborted. ~p:~p~p -> ~p~n",
+ [Mod, What, Args, Reason]),
+ catch apply(Mod, close_read, [Opaque]),
+ throw({error, Reason}).
+
+fallback_to_schema() ->
+ Fname = fallback_bup(),
+ fallback_to_schema(Fname).
+
+fallback_to_schema(Fname) ->
+ Mod = mnesia_backup,
+ case read_schema(Mod, Fname) of
+ {error, Reason} ->
+ {error, Reason};
+ Schema ->
+ case catch lookup_schema(schema, Schema) of
+ {error, _} ->
+ {error, "No schema in fallback"};
+ List ->
+ {ok, fallback, List}
+ end
+ end.
+
+%% Opens Opaque, reads the schema and then closes it
+read_schema(Mod, Opaque) ->
+ R = #restore{bup_module = Mod, bup_data = Opaque},
+ case catch read_schema_section(R) of
+ {error, Reason} ->
+ {error, Reason};
+ {R2, {_Header, Schema, _}} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ Schema
+ end.
+
+%% Open backup media and extract schema
+%% rewind backup media and leave it open
+%% Returns {R, {Header, Schema, Rest}}
+read_schema_section(R) ->
+ case catch do_read_schema_section(R) of
+ {'EXIT', Reason} ->
+ catch safe_apply(R, close_read, [R#restore.bup_data]),
+ {error, {'EXIT', Reason}};
+ {error, Reason} ->
+ catch safe_apply(R, close_read, [R#restore.bup_data]),
+ {error, Reason};
+ {R2, {H, Schema, Rest}} ->
+ Schema2 = convert_schema(H#log_header.log_version, Schema),
+ {R2, {H, Schema2, Rest}}
+ end.
+
+do_read_schema_section(R) ->
+ R2 = safe_apply(R, open_read, [R#restore.bup_data]),
+ {R3, RawSchema} = safe_apply(R2, read, [R2#restore.bup_data]),
+ do_read_schema_section(R3, verify_header(RawSchema), []).
+
+do_read_schema_section(R, {ok, B, C, []}, Acc) ->
+ case safe_apply(R, read, [R#restore.bup_data]) of
+ {R2, []} ->
+ {R2, {B, Acc, []}};
+ {R2, RawSchema} ->
+ do_read_schema_section(R2, {ok, B, C, RawSchema}, Acc)
+ end;
+
+do_read_schema_section(R, {ok, B, C, [Head | Tail]}, Acc)
+ when element(1, Head) == schema ->
+ do_read_schema_section(R, {ok, B, C, Tail}, Acc ++ [Head]);
+
+do_read_schema_section(R, {ok, B, _C, Rest}, Acc) ->
+ {R, {B, Acc, Rest}};
+
+do_read_schema_section(_R, {error, Reason}, _Acc) ->
+ {error, Reason}.
+
+verify_header([H | RawSchema]) when record(H, log_header) ->
+ Current = mnesia_log:backup_log_header(),
+ if
+ H#log_header.log_kind == Current#log_header.log_kind ->
+ Versions = ["0.1", "1.1", Current#log_header.log_version],
+ case lists:member(H#log_header.log_version, Versions) of
+ true ->
+ {ok, H, Current, RawSchema};
+ false ->
+ {error, {"Bad header version. Cannot be used as backup.", H}}
+ end;
+ true ->
+ {error, {"Bad kind of header. Cannot be used as backup.", H}}
+ end;
+verify_header(RawSchema) ->
+ {error, {"Missing header. Cannot be used as backup.", catch hd(RawSchema)}}.
+
+refresh_cookie(Schema, NewCookie) ->
+ case lists:keysearch(schema, 2, Schema) of
+ {value, {schema, schema, List}} ->
+ Cs = mnesia_schema:list2cs(List),
+ Cs2 = Cs#cstruct{cookie = NewCookie},
+ Item = {schema, schema, mnesia_schema:cs2list(Cs2)},
+ lists:keyreplace(schema, 2, Schema, Item);
+
+ false ->
+ Reason = "No schema found. Cannot be used as backup.",
+ throw({error, {Reason, Schema}})
+ end.
+
+%% Convert schema items from an external backup
+%% If backup format is the latest, no conversion is needed
+%% All supported backup formats should have their converters
+%% here as separate function clauses.
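+%%
+%% As a hedged illustration only, a clause for a hypothetical future
+%% format "9.9" would follow the same pattern (convert_9_9/1 is made up):
+%%
+%%   convert_schema("9.9", Schema) ->
+%%       convert_9_9(Schema);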
+convert_schema("0.1", Schema) ->
+ convert_0_1(Schema);
+convert_schema("1.1", Schema) ->
+ %% The new backup format is a pure extension of the old one
+ Current = mnesia_log:backup_log_header(),
+ convert_schema(Current#log_header.log_version, Schema);
+convert_schema(Latest, Schema) ->
+ H = mnesia_log:backup_log_header(),
+ if
+ H#log_header.log_version == Latest ->
+ Schema;
+ true ->
+ Reason = "Bad backup header version. Cannot convert schema.",
+ throw({error, {Reason, H}})
+ end.
+
+%% Backward compatibility for 0.1
+convert_0_1(Schema) ->
+ case lists:keysearch(schema, 2, Schema) of
+ {value, {schema, schema, List}} ->
+ Schema2 = lists:keydelete(schema, 2, Schema),
+ Cs = mnesia_schema:list2cs(List),
+ convert_0_1(Schema2, [], Cs);
+ false ->
+ List = mnesia_schema:get_initial_schema(disc_copies, [node()]),
+ Cs = mnesia_schema:list2cs(List),
+ convert_0_1(Schema, [], Cs)
+ end.
+
+convert_0_1([{schema, cookie, Cookie} | Schema], Acc, Cs) ->
+ convert_0_1(Schema, Acc, Cs#cstruct{cookie = Cookie});
+convert_0_1([{schema, db_nodes, DbNodes} | Schema], Acc, Cs) ->
+ convert_0_1(Schema, Acc, Cs#cstruct{disc_copies = DbNodes});
+convert_0_1([{schema, version, Version} | Schema], Acc, Cs) ->
+ convert_0_1(Schema, Acc, Cs#cstruct{version = Version});
+convert_0_1([{schema, Tab, Def} | Schema], Acc, Cs) ->
+ Head =
+ case lists:keysearch(index, 1, Def) of
+ {value, {index, PosList}} ->
+ %% Remove the snmp "index"
+ P = PosList -- [snmp],
+ Def2 = lists:keyreplace(index, 1, Def, {index, P}),
+ {schema, Tab, Def2};
+ false ->
+ {schema, Tab, Def}
+ end,
+ convert_0_1(Schema, [Head | Acc], Cs);
+convert_0_1([Head | Schema], Acc, Cs) ->
+ convert_0_1(Schema, [Head | Acc], Cs);
+convert_0_1([], Acc, Cs) ->
+ [schema2bup({schema, schema, Cs}) | Acc].
+
+%% Returns Val or throw error
+lookup_schema(Key, Schema) ->
+ case lists:keysearch(Key, 2, Schema) of
+ {value, {schema, Key, Val}} -> Val;
+ false -> throw({error, {"Cannot lookup", Key}})
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup compatibility
+
+%% Convert internal schema items to their backup counterparts
+schema2bup({schema, Tab}) ->
+ {schema, Tab};
+schema2bup({schema, Tab, TableDef}) ->
+ {schema, Tab, mnesia_schema:cs2list(TableDef)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Create schema on the given nodes
+%% Requires that old schemas have been deleted
+%% Returns ok | {error, Reason}
+create_schema([]) ->
+ create_schema([node()]);
+create_schema(Ns) when list(Ns) ->
+ case is_set(Ns) of
+ true ->
+ create_schema(Ns, mnesia_schema:ensure_no_schema(Ns));
+ false ->
+ {error, {combine_error, Ns}}
+ end;
+create_schema(Ns) ->
+ {error, {badarg, Ns}}.
+
+is_set(List) when list(List) ->
+ ordsets:is_set(lists:sort(List));
+is_set(_) ->
+ false.
+
+create_schema(Ns, ok) ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case mnesia_monitor:get_env(schema_location) of
+ ram ->
+ {error, {has_no_disc, node()}};
+ _ ->
+ case mnesia_schema:opt_create_dir(true, mnesia_lib:dir()) of
+ {error, What} ->
+ {error, What};
+ ok ->
+ Mod = mnesia_backup,
+ Str = mk_str(),
+ File = mnesia_lib:dir(Str),
+ file:delete(File),
+ case catch make_initial_backup(Ns, File, Mod) of
+ {ok, _Res} ->
+ case do_install_fallback(File, Mod) of
+ ok ->
+ file:delete(File),
+ ok;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+create_schema(_Ns, {error, Reason}) ->
+ {error, Reason};
+create_schema(_Ns, Reason) ->
+ {error, Reason}.
+
+mk_str() ->
+ Now = [integer_to_list(I) || I <- tuple_to_list(now())],
+ lists:concat([node()] ++ Now ++ ".TMP").
+
+make_initial_backup(Ns, Opaque, Mod) ->
+ Schema = [{schema, schema, mnesia_schema:get_initial_schema(disc_copies, Ns)}],
+ O2 = do_apply(Mod, open_write, [Opaque], Opaque),
+ O3 = do_apply(Mod, write, [O2, [mnesia_log:backup_log_header()]], O2),
+ O4 = do_apply(Mod, write, [O3, Schema], O3),
+ O5 = do_apply(Mod, commit_write, [O4], O4),
+ {ok, O5}.
+
+do_apply(_, write, [_, Items], Opaque) when Items == [] ->
+ Opaque;
+do_apply(Mod, What, Args, _Opaque) ->
+ case catch apply(Mod, What, Args) of
+ {ok, Opaque2} -> Opaque2;
+ {error, Reason} -> throw({error, Reason});
+ {'EXIT', Reason} -> throw({error, {'EXIT', Reason}})
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restore
+
+%% Restore schema and possibly other tables from a backup
+%% and replicate them to the necessary nodes
+%% Requires that old schemas have been deleted
+%% Returns ok | {error, Reason}
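+%%
+%% A hedged usage sketch ("/tmp/site.BUP" is just an example backup name;
+%% the options shown are those accepted by check_fallback_arg_type/2 below):
+%%
+%%   mnesia:install_fallback("/tmp/site.BUP",
+%%                           [{module, mnesia_backup},
+%%                            {scope, global},
+%%                            {default_op, keep_tables}])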
+install_fallback(Opaque) ->
+ install_fallback(Opaque, []).
+
+install_fallback(Opaque, Args) ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ do_install_fallback(Opaque, Args);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+do_install_fallback(Opaque, Mod) when atom(Mod) ->
+ do_install_fallback(Opaque, [{module, Mod}]);
+do_install_fallback(Opaque, Args) when list(Args) ->
+ case check_fallback_args(Args, #fallback_args{opaque = Opaque}) of
+ {ok, FA} ->
+ do_install_fallback(FA);
+ {error, Reason} ->
+ {error, Reason}
+ end;
+do_install_fallback(_Opaque, Args) ->
+ {error, {badarg, Args}}.
+
+check_fallback_args([Arg | Tail], FA) ->
+ case catch check_fallback_arg_type(Arg, FA) of
+ {'EXIT', _Reason} ->
+ {error, {badarg, Arg}};
+ FA2 ->
+ check_fallback_args(Tail, FA2)
+ end;
+check_fallback_args([], FA) ->
+ {ok, FA}.
+
+check_fallback_arg_type(Arg, FA) ->
+ case Arg of
+ {scope, global} ->
+ FA#fallback_args{scope = global};
+ {scope, local} ->
+ FA#fallback_args{scope = local};
+ {module, Mod} ->
+ Mod2 = mnesia_monitor:do_check_type(backup_module, Mod),
+ FA#fallback_args{module = Mod2};
+ {mnesia_dir, Dir} ->
+ FA#fallback_args{mnesia_dir = Dir,
+ use_default_dir = false};
+ {keep_tables, Tabs} ->
+ atom_list(Tabs),
+ FA#fallback_args{keep_tables = Tabs};
+ {skip_tables, Tabs} ->
+ atom_list(Tabs),
+ FA#fallback_args{skip_tables = Tabs};
+ {default_op, keep_tables} ->
+ FA#fallback_args{default_op = keep_tables};
+ {default_op, skip_tables} ->
+ FA#fallback_args{default_op = skip_tables}
+ end.
+
+atom_list([H | T]) when atom(H) ->
+ atom_list(T);
+atom_list([]) ->
+ ok.
+
+do_install_fallback(FA) ->
+ Pid = spawn_link(?MODULE, install_fallback_master, [self(), FA]),
+ Res =
+ receive
+ {'EXIT', Pid, Reason} -> % if appl has trapped exit
+ {error, {'EXIT', Reason}};
+ {Pid, Res2} ->
+ case Res2 of
+ {ok, _} ->
+ ok;
+ {error, Reason} ->
+ {error, {"Cannot install fallback", Reason}}
+ end
+ end,
+ Res.
+
+install_fallback_master(ClientPid, FA) ->
+ process_flag(trap_exit, true),
+ State = {start, FA},
+ Opaque = FA#fallback_args.opaque,
+ Mod = FA#fallback_args.module,
+ Res = (catch iterate(Mod, fun restore_recs/4, Opaque, State)),
+ unlink(ClientPid),
+ ClientPid ! {self(), Res},
+ exit(shutdown).
+
+restore_recs(_, _, _, stop) ->
+ throw({error, "restore_recs already stopped"});
+
+restore_recs(Recs, Header, Schema, {start, FA}) ->
+ %% No records in backup
+ Schema2 = convert_schema(Header#log_header.log_version, Schema),
+ CreateList = lookup_schema(schema, Schema2),
+ case catch mnesia_schema:list2cs(CreateList) of
+ {'EXIT', Reason} ->
+ throw({error, {"Bad schema in restore_recs", Reason}});
+ Cs ->
+ Ns = get_fallback_nodes(FA, Cs#cstruct.disc_copies),
+ global:set_lock({{mnesia_table_lock, schema}, self()}, Ns, infinity),
+ Args = [self(), FA],
+ Pids = [spawn_link(N, ?MODULE, fallback_receiver, Args) || N <- Ns],
+ send_fallback(Pids, {start, Header, Schema2}),
+ Res = restore_recs(Recs, Header, Schema2, Pids),
+ global:del_lock({{mnesia_table_lock, schema}, self()}, Ns),
+ Res
+ end;
+
+restore_recs([], _Header, _Schema, Pids) ->
+ send_fallback(Pids, swap),
+ send_fallback(Pids, stop),
+ stop;
+
+restore_recs(Recs, _, _, Pids) ->
+ send_fallback(Pids, {records, Recs}),
+ Pids.
+
+get_fallback_nodes(FA, Ns) ->
+ This = node(),
+ case lists:member(This, Ns) of
+ true ->
+ case FA#fallback_args.scope of
+ global -> Ns;
+ local -> [This]
+ end;
+ false ->
+ throw({error, {"No disc resident schema on local node", Ns}})
+ end.
+
+send_fallback(Pids, Msg) when list(Pids), Pids /= [] ->
+ lists:foreach(fun(Pid) -> Pid ! {self(), Msg} end, Pids),
+ rec_answers(Pids, []).
+
+rec_answers([], Acc) ->
+ case {lists:keysearch(error, 1, Acc), mnesia_lib:uniq(Acc)} of
+ {{value, {error, Val}}, _} -> throw({error, Val});
+ {_, [SameAnswer]} -> SameAnswer;
+ {_, Other} -> throw({error, {"Different answers", Other}})
+ end;
+rec_answers(Pids, Acc) ->
+ receive
+ {'EXIT', Pid, stopped} ->
+ Pids2 = lists:delete(Pid, Pids),
+ rec_answers(Pids2, [stopped|Acc]);
+ {'EXIT', Pid, Reason} ->
+ Pids2 = lists:delete(Pid, Pids),
+ rec_answers(Pids2, [{error, {'EXIT', Pid, Reason}}|Acc]);
+ {Pid, Reply} ->
+ Pids2 = lists:delete(Pid, Pids),
+ rec_answers(Pids2, [Reply|Acc])
+ end.
+
+fallback_exists() ->
+ Fname = fallback_bup(),
+ fallback_exists(Fname).
+
+fallback_exists(Fname) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_lib:exists(Fname);
+ false ->
+ case ?catch_val(active_fallback) of
+ {'EXIT', _} -> false;
+ Bool -> Bool
+ end
+ end.
+
+fallback_name() -> "FALLBACK.BUP".
+fallback_bup() -> mnesia_lib:dir(fallback_name()).
+
+fallback_tmp_name() -> "FALLBACK.TMP".
+%% fallback_full_tmp_name() -> mnesia_lib:dir(fallback_tmp_name()).
+
+fallback_receiver(Master, FA) ->
+ process_flag(trap_exit, true),
+
+ case catch register(mnesia_fallback, self()) of
+ {'EXIT', _} ->
+ Reason = {already_exists, node()},
+ local_fallback_error(Master, Reason);
+ true ->
+ FA2 = check_fallback_dir(Master, FA),
+ Bup = FA2#fallback_args.fallback_bup,
+ case mnesia_lib:exists(Bup) of
+ true ->
+ Reason2 = {already_exists, node()},
+ local_fallback_error(Master, Reason2);
+ false ->
+ Mod = mnesia_backup,
+ Tmp = FA2#fallback_args.fallback_tmp,
+ R = #restore{mode = replace,
+ bup_module = Mod,
+ bup_data = Tmp},
+ file:delete(Tmp),
+ case catch fallback_receiver_loop(Master, R, FA2, schema) of
+ {error, Reason} ->
+ local_fallback_error(Master, Reason);
+ Other ->
+ exit(Other)
+ end
+ end
+ end.
+
+local_fallback_error(Master, Reason) ->
+ Master ! {self(), {error, Reason}},
+ unlink(Master),
+ exit(Reason).
+
+check_fallback_dir(Master, FA) ->
+ case mnesia:system_info(schema_location) of
+ ram ->
+ Reason = {has_no_disc, node()},
+ local_fallback_error(Master, Reason);
+ _ ->
+ Dir = check_fallback_dir_arg(Master, FA),
+ Bup = filename:join([Dir, fallback_name()]),
+ Tmp = filename:join([Dir, fallback_tmp_name()]),
+ FA#fallback_args{fallback_bup = Bup,
+ fallback_tmp = Tmp,
+ mnesia_dir = Dir}
+ end.
+
+check_fallback_dir_arg(Master, FA) ->
+ case FA#fallback_args.use_default_dir of
+ true ->
+ mnesia_lib:dir();
+ false when FA#fallback_args.scope == local ->
+ Dir = FA#fallback_args.mnesia_dir,
+ case catch mnesia_monitor:do_check_type(dir, Dir) of
+ {'EXIT', _R} ->
+ Reason = {badarg, {dir, Dir}, node()},
+ local_fallback_error(Master, Reason);
+ AbsDir->
+ AbsDir
+ end;
+ false when FA#fallback_args.scope == global ->
+ Reason = {combine_error, global, dir, node()},
+ local_fallback_error(Master, Reason)
+ end.
+
+fallback_receiver_loop(Master, R, FA, State) ->
+ receive
+ {Master, {start, Header, Schema}} when State == schema ->
+ Dir = FA#fallback_args.mnesia_dir,
+ throw_bad_res(ok, mnesia_schema:opt_create_dir(true, Dir)),
+ R2 = safe_apply(R, open_write, [R#restore.bup_data]),
+ R3 = safe_apply(R2, write, [R2#restore.bup_data, [Header]]),
+ BupSchema = [schema2bup(S) || S <- Schema],
+ R4 = safe_apply(R3, write, [R3#restore.bup_data, BupSchema]),
+ Master ! {self(), ok},
+ fallback_receiver_loop(Master, R4, FA, records);
+
+ {Master, {records, Recs}} when State == records ->
+ R2 = safe_apply(R, write, [R#restore.bup_data, Recs]),
+ Master ! {self(), ok},
+ fallback_receiver_loop(Master, R2, FA, records);
+
+ {Master, swap} when State /= schema ->
+ ?eval_debug_fun({?MODULE, fallback_receiver_loop, pre_swap}, []),
+ safe_apply(R, commit_write, [R#restore.bup_data]),
+ Bup = FA#fallback_args.fallback_bup,
+ Tmp = FA#fallback_args.fallback_tmp,
+ throw_bad_res(ok, file:rename(Tmp, Bup)),
+ catch mnesia_lib:set(active_fallback, true),
+ ?eval_debug_fun({?MODULE, fallback_receiver_loop, post_swap}, []),
+ Master ! {self(), ok},
+ fallback_receiver_loop(Master, R, FA, stop);
+
+ {Master, stop} when State == stop ->
+ stopped;
+
+ Msg ->
+ safe_apply(R, abort_write, [R#restore.bup_data]),
+ Tmp = FA#fallback_args.fallback_tmp,
+ file:delete(Tmp),
+ throw({error, "Unexpected msg fallback_receiver_loop", Msg})
+ end.
+
+throw_bad_res(Expected, Expected) -> Expected;
+throw_bad_res(_Expected, {error, Actual}) -> throw({error, Actual});
+throw_bad_res(_Expected, Actual) -> throw({error, Actual}).
+
+-record(local_tab, {name, storage_type, dets_args, open, close, add, record_name}).
+
+tm_fallback_start(IgnoreFallback) ->
+ mnesia_schema:lock_schema(),
+ Res = do_fallback_start(fallback_exists(), IgnoreFallback),
+    mnesia_schema:unlock_schema(),
+ case Res of
+ ok -> ok;
+ {error, Reason} -> exit(Reason)
+ end.
+
+do_fallback_start(false, _IgnoreFallback) ->
+ ok;
+do_fallback_start(true, true) ->
+ verbose("Ignoring fallback at startup, but leaving it active...~n", []),
+ mnesia_lib:set(active_fallback, true),
+ ok;
+do_fallback_start(true, false) ->
+ verbose("Starting from fallback...~n", []),
+
+ Fname = fallback_bup(),
+ Mod = mnesia_backup,
+ Ets = ?ets_new_table(mnesia_local_tables, [set, public, {keypos, 2}]),
+ case catch iterate(Mod, fun restore_tables/4, Fname, {start, Ets}) of
+ {ok, Res} ->
+ case Res of
+ {local, _, LT} -> %% Close the last file
+ (LT#local_tab.close)(LT);
+ _ ->
+ ignore
+ end,
+ List = ?ets_match_object(Ets, '_'),
+ Tabs = [L#local_tab.name || L <- List, L#local_tab.name /= schema],
+ ?ets_delete_table(Ets),
+ mnesia_lib:swap_tmp_files(Tabs),
+ catch dets:close(schema),
+ Tmp = mnesia_lib:tab2tmp(schema),
+ Dat = mnesia_lib:tab2dat(schema),
+ case file:rename(Tmp, Dat) of
+ ok ->
+ file:delete(Fname),
+ ok;
+ {error, Reason} ->
+ file:delete(Tmp),
+ {error, {"Cannot start from fallback. Rename error.", Reason}}
+ end;
+ {error, Reason} ->
+ {error, {"Cannot start from fallback", Reason}};
+ {'EXIT', Reason} ->
+ {error, {"Cannot start from fallback", Reason}}
+ end.
+
+restore_tables(Recs, Header, Schema, {start, LocalTabs}) ->
+ Dir = mnesia_lib:dir(),
+ OldDir = filename:join([Dir, "OLD_DIR"]),
+ mnesia_schema:purge_dir(OldDir, []),
+ mnesia_schema:purge_dir(Dir, [fallback_name()]),
+ init_dat_files(Schema, LocalTabs),
+ State = {new, LocalTabs},
+ restore_tables(Recs, Header, Schema, State);
+restore_tables([Rec | Recs], Header, Schema, {new, LocalTabs}) ->
+ Tab = element(1, Rec),
+ case ?ets_lookup(LocalTabs, Tab) of
+ [] ->
+ State = {not_local, LocalTabs, Tab},
+ restore_tables(Recs, Header, Schema, State);
+ [L] when record(L, local_tab) ->
+ (L#local_tab.open)(Tab, L),
+ State = {local, LocalTabs, L},
+ restore_tables([Rec | Recs], Header, Schema, State)
+ end;
+restore_tables([Rec | Recs], Header, Schema, S = {not_local, LocalTabs, PrevTab}) ->
+ Tab = element(1, Rec),
+ if
+ Tab == PrevTab ->
+ restore_tables(Recs, Header, Schema, S);
+ true ->
+ State = {new, LocalTabs},
+ restore_tables([Rec | Recs], Header, Schema, State)
+ end;
+restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) ->
+ Tab = element(1, Rec),
+ if
+ Tab == L#local_tab.name ->
+ Key = element(2, Rec),
+ (L#local_tab.add)(Tab, Key, Rec, L),
+ restore_tables(Recs, Header, Schema, State);
+ true ->
+ (L#local_tab.close)(L),
+ NState = {new, LocalTabs},
+ restore_tables([Rec | Recs], Header, Schema, NState)
+ end;
+restore_tables([], _Header, _Schema, State) ->
+ State.
+
+%% Creates all necessary dat files and inserts
+%% the table definitions in the schema table.
+%%
+%% A #local_tab{} record is inserted into LocalTabs for each local table.
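+%%
+%% Illustrative sketch (not part of the original code): the Schema argument
+%% is a list of backup schema items in the shape matched by create_dat_files/2
+%% below, e.g.
+%%
+%%   [{schema, schema, SchemaCreateList},
+%%    {schema, foo, FooCreateList},   % hypothetical table foo to be created
+%%    {schema, bar}]                  % hypothetical table bar to be deleted
+%%
+%% where each CreateList is in the format accepted by mnesia_schema:list2cs/1.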
+init_dat_files(Schema, LocalTabs) ->
+ Fname = mnesia_lib:tab2tmp(schema),
+ Args = [{file, Fname}, {keypos, 2}, {type, set}],
+ case dets:open_file(schema, Args) of % Assume schema lock
+ {ok, _} ->
+ create_dat_files(Schema, LocalTabs),
+ dets:close(schema),
+ LocalTab = #local_tab{name = schema,
+ storage_type = disc_copies,
+ dets_args = Args,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = schema},
+ ?ets_insert(LocalTabs, LocalTab);
+ {error, Reason} ->
+ throw({error, {"Cannot open file", schema, Args, Reason}})
+ end.
+
+create_dat_files([{schema, schema, TabDef} | Tail], LocalTabs) ->
+ ok = dets:insert(schema, {schema, schema, TabDef}),
+ create_dat_files(Tail, LocalTabs);
+create_dat_files([{schema, Tab, TabDef} | Tail], LocalTabs) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ ok = dets:insert(schema, {schema, Tab, TabDef}),
+ RecName = Cs#cstruct.record_name,
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ cleanup_dat_file(Tab),
+ create_dat_files(Tail, LocalTabs);
+ disc_only_copies ->
+ Fname = mnesia_lib:tab2tmp(Tab),
+ Args = [{file, Fname}, {keypos, 2},
+ {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)}],
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, _} ->
+ mnesia_lib:dets_sync_close(Tab),
+ LocalTab = #local_tab{name = Tab,
+ storage_type = disc_only_copies,
+ dets_args = Args,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = RecName},
+ ?ets_insert(LocalTabs, LocalTab),
+ create_dat_files(Tail, LocalTabs);
+ {error, Reason} ->
+ throw({error, {"Cannot open file", Tab, Args, Reason}})
+ end;
+ ram_copies ->
+ %% Create .DCD if needed in open_media in case any ram_copies
+ %% are backed up.
+ LocalTab = #local_tab{name = Tab,
+ storage_type = ram_copies,
+ dets_args = ignore,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = RecName},
+ ?ets_insert(LocalTabs, LocalTab),
+ create_dat_files(Tail, LocalTabs);
+ Storage ->
+ %% Create DCD
+ Fname = mnesia_lib:tab2dcd(Tab),
+ file:delete(Fname),
+ Log = mnesia_log:open_log(fallback_tab, mnesia_log:dcd_log_header(),
+ Fname, false),
+ LocalTab = #local_tab{name = Tab,
+ storage_type = Storage,
+ dets_args = ignore,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = RecName},
+ mnesia_log:close_log(Log),
+ ?ets_insert(LocalTabs, LocalTab),
+ create_dat_files(Tail, LocalTabs)
+ end;
+create_dat_files([{schema, Tab} | Tail], LocalTabs) ->
+ cleanup_dat_file(Tab),
+ create_dat_files(Tail, LocalTabs);
+create_dat_files([], _LocalTabs) ->
+ ok.
+
+cleanup_dat_file(Tab) ->
+ ok = dets:delete(schema, {schema, Tab}),
+ mnesia_lib:cleanup_tmp_files([Tab]).
+
+open_media(Tab, LT) ->
+ case LT#local_tab.storage_type of
+ disc_only_copies ->
+ Args = LT#local_tab.dets_args,
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, _} -> ok;
+ {error, Reason} ->
+ throw({error, {"Cannot open file", Tab, Args, Reason}})
+ end;
+ ram_copies ->
+ %% Create .DCD as ram_copies backed up.
+ FnameDCD = mnesia_lib:tab2dcd(Tab),
+ file:delete(FnameDCD),
+ Log = mnesia_log:open_log(fallback_tab,
+ mnesia_log:dcd_log_header(),
+ FnameDCD, false),
+ mnesia_log:close_log(Log),
+
+ %% Create .DCL
+ Fname = mnesia_lib:tab2dcl(Tab),
+ file:delete(Fname),
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ Fname, false, false,
+ read_write);
+ _ ->
+ Fname = mnesia_lib:tab2dcl(Tab),
+ file:delete(Fname),
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ Fname, false, false,
+ read_write)
+ end.
+close_media(L) ->
+ Tab = L#local_tab.name,
+ case L#local_tab.storage_type of
+ disc_only_copies ->
+ mnesia_lib:dets_sync_close(Tab);
+ _ ->
+ mnesia_log:close_log({?MODULE,Tab})
+ end.
+
+add_to_media(Tab, Key, Rec, L) ->
+ RecName = L#local_tab.record_name,
+ case L#local_tab.storage_type of
+ disc_only_copies ->
+ case Rec of
+ {Tab, Key} ->
+ ok = dets:delete(Tab, Key);
+ (Rec) when Tab == RecName ->
+ ok = dets:insert(Tab, Rec);
+ (Rec) ->
+ Rec2 = setelement(1, Rec, RecName),
+ ok = dets:insert(Tab, Rec2)
+ end;
+ _ ->
+ Log = {?MODULE, Tab},
+ case Rec of
+ {Tab, Key} ->
+ mnesia_log:append(Log, {{Tab, Key}, {Tab, Key}, delete});
+ (Rec) when Tab == RecName ->
+ mnesia_log:append(Log, {{Tab, Key}, Rec, write});
+ (Rec) ->
+ Rec2 = setelement(1, Rec, RecName),
+ mnesia_log:append(Log, {{Tab, Key}, Rec2, write})
+ end
+ end.
+
+uninstall_fallback() ->
+ uninstall_fallback([{scope, global}]).
+
+uninstall_fallback(Args) ->
+ case check_fallback_args(Args, #fallback_args{}) of
+ {ok, FA} ->
+ do_uninstall_fallback(FA);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+do_uninstall_fallback(FA) ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ Pid = spawn_link(?MODULE, uninstall_fallback_master, [self(), FA]),
+ receive
+ {'EXIT', Pid, Reason} -> % if appl has trapped exit
+ {error, {'EXIT', Reason}};
+ {Pid, Res} ->
+ Res
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+uninstall_fallback_master(ClientPid, FA) ->
+ process_flag(trap_exit, true),
+
+ FA2 = check_fallback_dir(ClientPid, FA), % May exit
+ Bup = FA2#fallback_args.fallback_bup,
+ case fallback_to_schema(Bup) of
+ {ok, fallback, List} ->
+ Cs = mnesia_schema:list2cs(List),
+ case catch get_fallback_nodes(FA, Cs#cstruct.disc_copies) of
+ Ns when list(Ns) ->
+ do_uninstall(ClientPid, Ns, FA);
+ {error, Reason} ->
+ local_fallback_error(ClientPid, Reason)
+ end;
+ {error, Reason} ->
+ local_fallback_error(ClientPid, Reason)
+ end.
+
+do_uninstall(ClientPid, Ns, FA) ->
+ Args = [self(), FA],
+ global:set_lock({{mnesia_table_lock, schema}, self()}, Ns, infinity),
+ Pids = [spawn_link(N, ?MODULE, local_uninstall_fallback, Args) || N <- Ns],
+ Res = do_uninstall(ClientPid, Pids, [], [], ok),
+ global:del_lock({{mnesia_table_lock, schema}, self()}, Ns),
+ ClientPid ! {self(), Res},
+ unlink(ClientPid),
+ exit(shutdown).
+
+do_uninstall(ClientPid, [Pid | Pids], GoodPids, BadNodes, Res) ->
+ receive
+ %% {'EXIT', ClientPid, _} ->
+ %% client_exit;
+ {'EXIT', Pid, Reason} ->
+ BadNode = node(Pid),
+ BadRes = {error, {"Uninstall fallback", BadNode, Reason}},
+ do_uninstall(ClientPid, Pids, GoodPids, [BadNode | BadNodes], BadRes);
+ {Pid, {error, Reason}} ->
+ BadNode = node(Pid),
+ BadRes = {error, {"Uninstall fallback", BadNode, Reason}},
+ do_uninstall(ClientPid, Pids, GoodPids, [BadNode | BadNodes], BadRes);
+ {Pid, started} ->
+ do_uninstall(ClientPid, Pids, [Pid | GoodPids], BadNodes, Res)
+ end;
+do_uninstall(ClientPid, [], GoodPids, [], ok) ->
+ lists:foreach(fun(Pid) -> Pid ! {self(), do_uninstall} end, GoodPids),
+ rec_uninstall(ClientPid, GoodPids, ok);
+do_uninstall(_ClientPid, [], GoodPids, BadNodes, BadRes) ->
+ lists:foreach(fun(Pid) -> exit(Pid, shutdown) end, GoodPids),
+ {error, {node_not_running, BadNodes, BadRes}}.
+
+local_uninstall_fallback(Master, FA) ->
+ %% Don't trap exit
+
+ register(mnesia_fallback, self()), % May exit
+ FA2 = check_fallback_dir(Master, FA), % May exit
+ Master ! {self(), started},
+
+ receive
+ {Master, do_uninstall} ->
+ ?eval_debug_fun({?MODULE, uninstall_fallback2, pre_delete}, []),
+ catch mnesia_lib:set(active_fallback, false),
+ Tmp = FA2#fallback_args.fallback_tmp,
+ Bup = FA2#fallback_args.fallback_bup,
+ file:delete(Tmp),
+ Res =
+ case fallback_exists(Bup) of
+ true -> file:delete(Bup);
+ false -> ok
+ end,
+ ?eval_debug_fun({?MODULE, uninstall_fallback2, post_delete}, []),
+ Master ! {self(), Res},
+ unlink(Master),
+ exit(normal)
+ end.
+
+rec_uninstall(ClientPid, [Pid | Pids], AccRes) ->
+ receive
+ %% {'EXIT', ClientPid, _} ->
+ %% exit(shutdown);
+ {'EXIT', Pid, R} ->
+ Reason = {node_not_running, {node(Pid), R}},
+ rec_uninstall(ClientPid, Pids, {error, Reason});
+ {Pid, ok} ->
+ rec_uninstall(ClientPid, Pids, AccRes);
+ {Pid, BadRes} ->
+ rec_uninstall(ClientPid, Pids, BadRes)
+ end;
+rec_uninstall(ClientPid, [], Res) ->
+ ClientPid ! {self(), Res},
+ unlink(ClientPid),
+ exit(normal).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup traversal
+
+%% Iterate over a backup and produce a new backup.
+%% Fun(BackupItem, Acc) is applied for each BackupItem.
+%%
+%% Valid BackupItems are:
+%%
+%% {schema, Tab} Table to be deleted
+%% {schema, Tab, CreateList} Table to be created, CreateList may be empty
+%% {schema, db_nodes, DbNodes} List of nodes, defaults to [node()] OLD
+%% {schema, version, Version} Schema version OLD
+%% {schema, cookie, Cookie} Unique schema cookie OLD
+%% {Tab, Key} Oid for record to be deleted
+%% Record Record to be inserted.
+%%
+%% The Fun must return a tuple {BackupItems, NewAcc}
+%% where BackupItems is a list of valid BackupItems and
+%% NewAcc is a new accumulator value. Once BackupItems
+%% that are not schema related have been returned, no more schema
+%% items may be returned. The schema-related items must always come
+%% first in the backup.
+%%
+%% If TargetMod == read_only, no new backup will be created.
+%%
+%% Opening of the source media is performed by a call
+%% to SourceMod:open_read(Source).
+%%
+%% Opening of the target media is performed by a call
+%% to TargetMod:open_write(Target).
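+%%
+%% Illustrative sketch (not part of the original code): a Fun that renames
+%% a hypothetical table foo to bar while copying a backup. It is simplified;
+%% a real rename would also have to adjust the CreateList of foo, and the
+%% file names are made up as well.
+%%
+%%   Convert = fun({schema, foo, CreateList}, Acc) ->
+%%                     {[{schema, bar, CreateList}], Acc};
+%%                ({foo, Key}, Acc) ->
+%%                     {[{bar, Key}], Acc};
+%%                (Rec, Acc) when element(1, Rec) == foo ->
+%%                     {[setelement(1, Rec, bar)], Acc};
+%%                (Other, Acc) ->
+%%                     {[Other], Acc}
+%%             end,
+%%   {ok, _} = traverse_backup("foo.BUP", "bar.BUP", Convert, unused_acc).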
+traverse_backup(Source, Target, Fun, Acc) ->
+ Mod = mnesia_monitor:get_env(backup_module),
+ traverse_backup(Source, Mod, Target, Mod, Fun, Acc).
+
+traverse_backup(Source, SourceMod, Target, TargetMod, Fun, Acc) ->
+ Args = [self(), Source, SourceMod, Target, TargetMod, Fun, Acc],
+ Pid = spawn_link(?MODULE, do_traverse_backup, Args),
+ receive
+ {'EXIT', Pid, Reason} ->
+ {error, {"Backup traversal crashed", Reason}};
+ {iter_done, Pid, Res} ->
+ Res
+ end.
+
+do_traverse_backup(ClientPid, Source, SourceMod, Target, TargetMod, Fun, Acc) ->
+ process_flag(trap_exit, true),
+ Iter =
+ if
+ TargetMod /= read_only ->
+ case catch do_apply(TargetMod, open_write, [Target], Target) of
+ {error, Error} ->
+ unlink(ClientPid),
+ ClientPid ! {iter_done, self(), {error, Error}},
+ exit(Error);
+ Else -> Else
+ end;
+ true ->
+ ignore
+ end,
+ A = {start, Fun, Acc, TargetMod, Iter},
+ Res =
+ case iterate(SourceMod, fun trav_apply/4, Source, A) of
+ {ok, {iter, _, Acc2, _, Iter2}} when TargetMod /= read_only ->
+ case catch do_apply(TargetMod, commit_write, [Iter2], Iter2) of
+ {error, Reason} ->
+ {error, Reason};
+ _ ->
+ {ok, Acc2}
+ end;
+ {ok, {iter, _, Acc2, _, _}} ->
+ {ok, Acc2};
+ {error, Reason} when TargetMod /= read_only->
+ catch do_apply(TargetMod, abort_write, [Iter], Iter),
+ {error, {"Backup traversal failed", Reason}};
+ {error, Reason} ->
+ {error, {"Backup traversal failed", Reason}}
+ end,
+ unlink(ClientPid),
+ ClientPid ! {iter_done, self(), Res}.
+
+trav_apply(Recs, _Header, _Schema, {iter, Fun, Acc, Mod, Iter}) ->
+ {NewRecs, Acc2} = filter_foldl(Fun, Acc, Recs),
+ if
+ Mod /= read_only, NewRecs /= [] ->
+ Iter2 = do_apply(Mod, write, [Iter, NewRecs], Iter),
+ {iter, Fun, Acc2, Mod, Iter2};
+ true ->
+ {iter, Fun, Acc2, Mod, Iter}
+ end;
+trav_apply(Recs, Header, Schema, {start, Fun, Acc, Mod, Iter}) ->
+ Iter2 =
+ if
+ Mod /= read_only ->
+ do_apply(Mod, write, [Iter, [Header]], Iter);
+ true ->
+ Iter
+ end,
+ TravAcc = trav_apply(Schema, Header, Schema, {iter, Fun, Acc, Mod, Iter2}),
+ trav_apply(Recs, Header, Schema, TravAcc).
+
+filter_foldl(Fun, Acc, [Head|Tail]) ->
+ case Fun(Head, Acc) of
+ {HeadItems, HeadAcc} when list(HeadItems) ->
+ {TailItems, TailAcc} = filter_foldl(Fun, HeadAcc, Tail),
+ {HeadItems ++ TailItems, TailAcc};
+ Other ->
+ throw({error, {"Fun must return a list", Other}})
+ end;
+filter_foldl(_Fun, Acc, []) ->
+ {[], Acc}.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint.erl
new file mode 100644
index 0000000000..aa2e99642b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint.erl
@@ -0,0 +1,1284 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_checkpoint.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_checkpoint).
+
+%% TM callback interface
+-export([
+ tm_add_copy/2,
+ tm_change_table_copy_type/3,
+ tm_del_copy/2,
+ tm_mnesia_down/1,
+ tm_prepare/1,
+ tm_retain/4,
+ tm_retain/5,
+ tm_enter_pending/1,
+ tm_enter_pending/3,
+ tm_exit_pending/1,
+ convert_cp_record/1
+ ]).
+
+%% Public interface
+-export([
+ activate/1,
+ checkpoints/0,
+ deactivate/1,
+ deactivate/2,
+ iterate/6,
+ most_local_node/2,
+ really_retain/2,
+ stop/0,
+ stop_iteration/1,
+ tables_and_cookie/1
+ ]).
+
+%% Internal
+-export([
+ call/2,
+ cast/2,
+ init/1,
+ remote_deactivate/1,
+ start/1
+ ]).
+
+%% sys callback interface
+-export([
+ system_code_change/4,
+ system_continue/3,
+ system_terminate/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [add/2, del/2, set/2, unset/1]).
+-import(mnesia_lib, [dbg_out/2]).
+
+-record(tm, {log, pending, transactions, checkpoints}).
+
+-record(checkpoint_args, {name = {now(), node()},
+ allow_remote = true,
+ ram_overrides_dump = false,
+ nodes = [],
+ node = node(),
+ now = now(),
+ cookie = ?unique_cookie,
+ min = [],
+ max = [],
+ pending_tab,
+ wait_for_old, % Initially undefined then List
+ is_activated = false,
+ ignore_new = [],
+ retainers = [],
+ iterators = [],
+ supervisor,
+ pid
+ }).
+
+%% Old record definition
+-record(checkpoint, {name,
+ allow_remote,
+ ram_overrides_dump,
+ nodes,
+ node,
+ now,
+ min,
+ max,
+ pending_tab,
+ wait_for_old,
+ is_activated,
+ ignore_new,
+ retainers,
+ iterators,
+ supervisor,
+ pid
+ }).
+
+-record(retainer, {cp_name, tab_name, store, writers = [], really_retain = true}).
+
+-record(iter, {tab_name, oid_tab, main_tab, retainer_tab, source, val, pid}).
+
+-record(pending, {tid, disc_nodes = [], ram_nodes = []}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% TM callback functions
+
+stop() ->
+ lists:foreach(fun(Name) -> call(Name, stop) end,
+ checkpoints()),
+ ok.
+
+tm_prepare(Cp) when record(Cp, checkpoint_args) ->
+ Name = Cp#checkpoint_args.name,
+ case lists:member(Name, checkpoints()) of
+ false ->
+ start_retainer(Cp);
+ true ->
+ {error, {already_exists, Name, node()}}
+ end;
+tm_prepare(Cp) when record(Cp, checkpoint) ->
+ %% Node with old protocol sent an old checkpoint record
+ %% and we have to convert it
+ case convert_cp_record(Cp) of
+ {ok, NewCp} ->
+ tm_prepare(NewCp);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+tm_mnesia_down(Node) ->
+ lists:foreach(fun(Name) -> cast(Name, {mnesia_down, Node}) end,
+ checkpoints()).
+
+%% Returns pending
+tm_enter_pending(Tid, DiscNs, RamNs) ->
+ Pending = #pending{tid = Tid, disc_nodes = DiscNs, ram_nodes = RamNs},
+ tm_enter_pending(Pending).
+
+tm_enter_pending(Pending) ->
+ PendingTabs = val(pending_checkpoints),
+ tm_enter_pending(PendingTabs, Pending).
+
+tm_enter_pending([], Pending) ->
+ Pending;
+tm_enter_pending([Tab | Tabs], Pending) ->
+ catch ?ets_insert(Tab, Pending),
+ tm_enter_pending(Tabs, Pending).
+
+tm_exit_pending(Tid) ->
+ Pids = val(pending_checkpoint_pids),
+ tm_exit_pending(Pids, Tid).
+
+tm_exit_pending([], Tid) ->
+ Tid;
+tm_exit_pending([Pid | Pids], Tid) ->
+ Pid ! {self(), {exit_pending, Tid}},
+ tm_exit_pending(Pids, Tid).
+
+enter_still_pending([Tid | Tids], Tab) ->
+ ?ets_insert(Tab, #pending{tid = Tid}),
+ enter_still_pending(Tids, Tab);
+enter_still_pending([], _Tab) ->
+ ok.
+
+
+%% Looks up checkpoints for functions in mnesia_tm.
+tm_retain(Tid, Tab, Key, Op) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ tm_retain(Tid, Tab, Key, Op, Checkpoints);
+ _ ->
+ undefined
+ end.
+
+tm_retain(Tid, Tab, Key, Op, Checkpoints) ->
+ case Op of
+ clear_table ->
+ OldRecs = mnesia_lib:db_match_object(Tab, '_'),
+ send_group_retain(OldRecs, Checkpoints, Tid, Tab, []),
+ OldRecs;
+ _ ->
+ OldRecs = mnesia_lib:db_get(Tab, Key),
+ send_retain(Checkpoints, {retain, Tid, Tab, Key, OldRecs}),
+ OldRecs
+ end.
+
+send_group_retain([Rec | Recs], Checkpoints, Tid, Tab, [PrevRec | PrevRecs])
+ when element(2, Rec) /= element(2, PrevRec) ->
+ Key = element(2, PrevRec),
+ OldRecs = lists:reverse([PrevRec | PrevRecs]),
+ send_retain(Checkpoints, {retain, Tid, Tab, Key, OldRecs}),
+ send_group_retain(Recs, Checkpoints, Tid, Tab, [Rec]);
+send_group_retain([Rec | Recs], Checkpoints, Tid, Tab, Acc) ->
+ send_group_retain(Recs, Checkpoints, Tid, Tab, [Rec | Acc]);
+send_group_retain([], Checkpoints, Tid, Tab, [PrevRec | PrevRecs]) ->
+ Key = element(2, PrevRec),
+ OldRecs = lists:reverse([PrevRec | PrevRecs]),
+ send_retain(Checkpoints, {retain, Tid, Tab, Key, OldRecs}),
+ ok;
+send_group_retain([], _Checkpoints, _Tid, _Tab, []) ->
+ ok.
+
+send_retain([Name | Names], Msg) ->
+ cast(Name, Msg),
+ send_retain(Names, Msg);
+send_retain([], _Msg) ->
+ ok.
+
+tm_add_copy(Tab, Node) when Node /= node() ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ Fun = fun(Name) -> call(Name, {add_copy, Tab, Node}) end,
+ map_call(Fun, Checkpoints, ok);
+ _ ->
+ ok
+ end.
+
+tm_del_copy(Tab, Node) when Node == node() ->
+ mnesia_subscr:unsubscribe_table(Tab),
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ Fun = fun(Name) -> call(Name, {del_copy, Tab, Node}) end,
+ map_call(Fun, Checkpoints, ok);
+ _ ->
+ ok
+ end.
+
+tm_change_table_copy_type(Tab, From, To) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ Fun = fun(Name) -> call(Name, {change_copy, Tab, From, To}) end,
+ map_call(Fun, Checkpoints, ok);
+ _ ->
+ ok
+ end.
+
+map_call(Fun, [Name | Names], Res) ->
+ case Fun(Name) of
+ ok ->
+ map_call(Fun, Names, Res);
+ {error, {no_exists, Name}} ->
+ map_call(Fun, Names, Res);
+ {error, Reason} ->
+            %% BUGBUG: We may end up with too many checkpoint retainers
+            %% in the add_copy case. How do we remove them?
+ map_call(Fun, Names, {error, Reason})
+ end;
+map_call(_Fun, [], Res) ->
+ Res.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Public functions
+
+deactivate(Name) ->
+ case call(Name, get_checkpoint) of
+ {error, Reason} ->
+ {error, Reason};
+ Cp ->
+ deactivate(Cp#checkpoint_args.nodes, Name)
+ end.
+
+deactivate(Nodes, Name) ->
+ rpc:multicall(Nodes, ?MODULE, remote_deactivate, [Name]),
+ ok.
+
+remote_deactivate(Name) ->
+ call(Name, deactivate).
+
+checkpoints() -> val(checkpoints).
+
+tables_and_cookie(Name) ->
+ case call(Name, get_checkpoint) of
+ {error, Reason} ->
+ {error, Reason};
+ Cp ->
+ Tabs = Cp#checkpoint_args.min ++ Cp#checkpoint_args.max,
+ Cookie = Cp#checkpoint_args.cookie,
+ {ok, Tabs, Cookie}
+ end.
+
+most_local_node(Name, Tab) ->
+ case ?catch_val({Tab, {retainer, Name}}) of
+ {'EXIT', _} ->
+ {error, {"No retainer attached to table", [Tab, Name]}};
+ R ->
+ Writers = R#retainer.writers,
+ LocalWriter = lists:member(node(), Writers),
+ if
+ LocalWriter == true ->
+ {ok, node()};
+ Writers /= [] ->
+ {ok, hd(Writers)};
+ true ->
+ {error, {"No retainer attached to table", [Tab, Name]}}
+ end
+ end.
+
+really_retain(Name, Tab) ->
+ R = val({Tab, {retainer, Name}}),
+ R#retainer.really_retain.
+
+%% Activate a checkpoint.
+%%
+%% A checkpoint is a transaction-consistent state that may be used to
+%% perform a distributed backup or to roll back the involved tables to
+%% their old state. Backups may also be used to restore tables to
+%% their old state. Args is a list of the following tuples:
+%%
+%% {name, Name}
+%% Name of checkpoint. Each checkpoint must have a name which
+%% is unique on the reachable nodes. The name may be reused when
+%% the checkpoint has been deactivated.
+%% By default a probably unique name is generated.
+%% Multiple checkpoints may be set on the same table.
+%%
+%% {allow_remote, Bool}
+%% false means that all retainers must be local. If the
+%% table does not reside locally, the checkpoint fails.
+%% true allows retainers on other nodes.
+%%
+%% {min, MinTabs}
+%% Minimize redundancy and only keep checkpoint info together with
+%%    one replica, preferably at the local node. If any node involved in
+%%    the checkpoint goes down, the checkpoint is deactivated.
+%%
+%% {max, MaxTabs}
+%% Maximize redundancy and keep checkpoint info together with all
+%%    replicas. The checkpoint becomes more fault tolerant if the
+%%    tables have several replicas. When new replicas are added, they
+%% will also get a retainer attached to them.
+%%
+%% {ram_overrides_dump, Bool}
+%% {ram_overrides_dump, Tabs}
+%%    Only applicable for ram_copies. Bool controls which versions of
+%%    the records should be included in the checkpoint state.
+%%    true means that the latest committed records in ram (i.e. the
+%% records that the application accesses) should be included
+%% in the checkpoint. false means that the records dumped to
+%% dat-files (the records that will be loaded at startup) should
+%% be included in the checkpoint. Tabs is a list of tables.
+%% Default is false.
+%%
+%% {ignore_new, TidList}
+%% Normally we wait for all pending transactions to complete
+%% before we allow iteration over the checkpoint. But in order
+%% to cope with checkpoint activation inside a transaction that
+%% currently prepares commit (mnesia_init:get_net_work_copy) we
+%% need to have the ability to ignore the enclosing transaction.
+%% We do not wait for the transactions in TidList to end. The
+%% transactions in TidList are regarded as newer than the checkpoint.
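+%%
+%% Illustrative sketch (not part of the original code): activating and later
+%% deactivating a checkpoint that retains a hypothetical table foo on all of
+%% its replicas; the checkpoint name my_cp is made up as well.
+%%
+%%   {ok, Name, _Nodes} =
+%%       mnesia_checkpoint:activate([{name, my_cp},
+%%                                   {max, [foo]},
+%%                                   {ram_overrides_dump, true}]),
+%%   %% ... back up or iterate over the checkpoint here ...
+%%   ok = mnesia_checkpoint:deactivate(Name).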
+
+activate(Args) ->
+ case args2cp(Args) of
+ {ok, Cp} ->
+ do_activate(Cp);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+args2cp(Args) when list(Args)->
+ case catch lists:foldl(fun check_arg/2, #checkpoint_args{}, Args) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Cp ->
+ case check_tables(Cp) of
+ {error, Reason} ->
+ {error, Reason};
+ {ok, Overriders, AllTabs} ->
+ arrange_retainers(Cp, Overriders, AllTabs)
+ end
+ end;
+args2cp(Args) ->
+ {error, {badarg, Args}}.
+
+check_arg({name, Name}, Cp) ->
+ case lists:member(Name, checkpoints()) of
+ true ->
+ exit({already_exists, Name});
+ false ->
+ case catch tab2retainer({foo, Name}) of
+ List when list(List) ->
+ Cp#checkpoint_args{name = Name};
+ _ ->
+ exit({badarg, Name})
+ end
+ end;
+check_arg({allow_remote, true}, Cp) ->
+ Cp#checkpoint_args{allow_remote = true};
+check_arg({allow_remote, false}, Cp) ->
+ Cp#checkpoint_args{allow_remote = false};
+check_arg({ram_overrides_dump, true}, Cp) ->
+ Cp#checkpoint_args{ram_overrides_dump = true};
+check_arg({ram_overrides_dump, false}, Cp) ->
+ Cp#checkpoint_args{ram_overrides_dump = false};
+check_arg({ram_overrides_dump, Tabs}, Cp) when list(Tabs) ->
+ Cp#checkpoint_args{ram_overrides_dump = Tabs};
+check_arg({min, Tabs}, Cp) when list(Tabs) ->
+ Cp#checkpoint_args{min = Tabs};
+check_arg({max, Tabs}, Cp) when list(Tabs) ->
+ Cp#checkpoint_args{max = Tabs};
+check_arg({ignore_new, Tids}, Cp) when list(Tids) ->
+ Cp#checkpoint_args{ignore_new = Tids};
+check_arg(Arg, _) ->
+ exit({badarg, Arg}).
+
+check_tables(Cp) ->
+ Min = Cp#checkpoint_args.min,
+ Max = Cp#checkpoint_args.max,
+ AllTabs = Min ++ Max,
+ DoubleTabs = [T || T <- Min, lists:member(T, Max)],
+ Overriders = Cp#checkpoint_args.ram_overrides_dump,
+ if
+ DoubleTabs /= [] ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{min, DoubleTabs}, {max, DoubleTabs}]}};
+ Min == [], Max == [] ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{min, Min}, {max, Max}]}};
+ Overriders == false ->
+ {ok, [], AllTabs};
+ Overriders == true ->
+ {ok, AllTabs, AllTabs};
+ list(Overriders) ->
+ case [T || T <- Overriders, not lists:member(T, Min)] of
+ [] ->
+ case [T || T <- Overriders, not lists:member(T, Max)] of
+ [] ->
+ {ok, Overriders, AllTabs};
+ Outsiders ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{ram_overrides_dump, Outsiders},
+ {max, Outsiders}]}}
+ end;
+ Outsiders ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{ram_overrides_dump, Outsiders},
+ {min, Outsiders}]}}
+ end
+ end.
+
+arrange_retainers(Cp, Overriders, AllTabs) ->
+ R = #retainer{cp_name = Cp#checkpoint_args.name},
+ case catch [R#retainer{tab_name = Tab,
+ writers = select_writers(Cp, Tab)}
+ || Tab <- AllTabs] of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Retainers ->
+ {ok, Cp#checkpoint_args{ram_overrides_dump = Overriders,
+ retainers = Retainers,
+ nodes = writers(Retainers)}}
+ end.
+
+select_writers(Cp, Tab) ->
+ case filter_remote(Cp, val({Tab, active_replicas})) of
+ [] ->
+ exit({"Cannot prepare checkpoint (replica not available)",
+ [Tab, Cp#checkpoint_args.name]});
+ Writers ->
+ This = node(),
+ case {lists:member(Tab, Cp#checkpoint_args.max),
+ lists:member(This, Writers)} of
+ {true, _} -> Writers; % Max
+ {false, true} -> [This];
+ {false, false} -> [hd(Writers)]
+ end
+ end.
+
+filter_remote(Cp, Writers) when Cp#checkpoint_args.allow_remote == true ->
+ Writers;
+filter_remote(_Cp, Writers) ->
+ This = node(),
+ case lists:member(This, Writers) of
+ true -> [This];
+ false -> []
+ end.
+
+writers(Retainers) ->
+ Fun = fun(R, Acc) -> R#retainer.writers ++ Acc end,
+ Writers = lists:foldl(Fun, [], Retainers),
+ mnesia_lib:uniq(Writers).
+
+do_activate(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ Nodes = Cp#checkpoint_args.nodes,
+ case mnesia_tm:prepare_checkpoint(Nodes, Cp) of
+ {Replies, []} ->
+ check_prep(Replies, Name, Nodes, Cp#checkpoint_args.ignore_new);
+ {_, BadNodes} ->
+ {error, {"Cannot prepare checkpoint (bad nodes)",
+ [Name, BadNodes]}}
+ end.
+
+check_prep([{ok, Name, IgnoreNew, _Node} | Replies], Name, Nodes, IgnoreNew) ->
+ check_prep(Replies, Name, Nodes, IgnoreNew);
+check_prep([{error, Reason} | _Replies], Name, _Nodes, _IgnoreNew) ->
+ {error, {"Cannot prepare checkpoint (bad reply)",
+ [Name, Reason]}};
+check_prep([{badrpc, Reason} | _Replies], Name, _Nodes, _IgnoreNew) ->
+ {error, {"Cannot prepare checkpoint (badrpc)",
+ [Name, Reason]}};
+check_prep([], Name, Nodes, IgnoreNew) ->
+ collect_pending(Name, Nodes, IgnoreNew).
+
+collect_pending(Name, Nodes, IgnoreNew) ->
+ case rpc:multicall(Nodes, ?MODULE, call, [Name, collect_pending]) of
+ {Replies, []} ->
+ case catch ?ets_new_table(mnesia_union, [bag]) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table pending union",
+ {error, {system_limit, Msg, Reason}};
+ UnionTab ->
+ compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew)
+ end;
+ {_, BadNodes} ->
+ deactivate(Nodes, Name),
+ {error, {"Cannot collect from pending checkpoint", Name, BadNodes}}
+ end.
+
+compute_union([{ok, Pending} | Replies], Nodes, Name, UnionTab, IgnoreNew) ->
+ add_pending(Pending, UnionTab),
+ compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew);
+compute_union([{error, Reason} | _Replies], Nodes, Name, UnionTab, _IgnoreNew) ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, Reason};
+compute_union([{badrpc, Reason} | _Replies], Nodes, Name, UnionTab, _IgnoreNew) ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, {badrpc, Reason}};
+compute_union([], Nodes, Name, UnionTab, IgnoreNew) ->
+ send_activate(Nodes, Nodes, Name, UnionTab, IgnoreNew).
+
+add_pending([P | Pending], UnionTab) ->
+ add_pending_node(P#pending.disc_nodes, P#pending.tid, UnionTab),
+ add_pending_node(P#pending.ram_nodes, P#pending.tid, UnionTab),
+ add_pending(Pending, UnionTab);
+add_pending([], _UnionTab) ->
+ ok.
+
+add_pending_node([Node | Nodes], Tid, UnionTab) ->
+ ?ets_insert(UnionTab, {Node, Tid}),
+ add_pending_node(Nodes, Tid, UnionTab);
+add_pending_node([], _Tid, _UnionTab) ->
+ ok.
+
+send_activate([Node | Nodes], AllNodes, Name, UnionTab, IgnoreNew) ->
+ Pending = [Tid || {_, Tid} <- ?ets_lookup(UnionTab, Node),
+ not lists:member(Tid, IgnoreNew)],
+ case rpc:call(Node, ?MODULE, call, [Name, {activate, Pending}]) of
+ activated ->
+ send_activate(Nodes, AllNodes, Name, UnionTab, IgnoreNew);
+ {badrpc, Reason} ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, {"Activation failed (bad node)", Name, Node, Reason}};
+ {error, Reason} ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, {"Activation failed", Name, Node, Reason}}
+ end;
+send_activate([], AllNodes, Name, UnionTab, _IgnoreNew) ->
+ ?ets_delete_table(UnionTab),
+ {ok, Name, AllNodes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Checkpoint server
+
+cast(Name, Msg) ->
+ case ?catch_val({checkpoint, Name}) of
+ {'EXIT', _} ->
+ {error, {no_exists, Name}};
+
+ Pid when pid(Pid) ->
+ Pid ! {self(), Msg},
+ {ok, Pid}
+ end.
+
+call(Name, Msg) ->
+ case cast(Name, Msg) of
+ {ok, Pid} ->
+ catch link(Pid), % Always local
+ Self = self(),
+ receive
+ {'EXIT', Pid, Reason} ->
+ {error, {"Got exit", [Name, Reason]}};
+ {Name, Self, Reply} ->
+ unlink(Pid),
+ Reply
+ end;
+ Error ->
+ Error
+ end.
+
+abcast(Nodes, Name, Msg) ->
+ rpc:eval_everywhere(Nodes, ?MODULE, cast, [Name, Msg]).
+
+reply(nopid, _Name, _Reply) ->
+ ignore;
+reply(ReplyTo, Name, Reply) ->
+ ReplyTo ! {Name, ReplyTo, Reply}.
+
+%% Returns {ok, NewCp} or {error, Reason}
+start_retainer(Cp) ->
+ % Will never be restarted
+ Name = Cp#checkpoint_args.name,
+ case supervisor:start_child(mnesia_checkpoint_sup, [Cp]) of
+ {ok, _Pid} ->
+ {ok, Name, Cp#checkpoint_args.ignore_new, node()};
+ {error, Reason} ->
+ {error, {"Cannot create checkpoint retainer",
+ Name, node(), Reason}}
+ end.
+
+start(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ Args = [Cp#checkpoint_args{supervisor = self()}],
+ mnesia_monitor:start_proc({?MODULE, Name}, ?MODULE, init, Args).
+
+init(Cp) ->
+ process_flag(trap_exit, true),
+ Name = Cp#checkpoint_args.name,
+ Props = [set, public, {keypos, 2}],
+ case catch ?ets_new_table(mnesia_pending_checkpoint, Props) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table for pending transactions",
+ Error = {error, {system_limit, Name, Msg, Reason}},
+ proc_lib:init_ack(Cp#checkpoint_args.supervisor, Error);
+ PendingTab ->
+ Rs = [prepare_tab(Cp, R) || R <- Cp#checkpoint_args.retainers],
+ Cp2 = Cp#checkpoint_args{retainers = Rs,
+ pid = self(),
+ pending_tab = PendingTab},
+ add(pending_checkpoint_pids, self()),
+ add(pending_checkpoints, PendingTab),
+ set({checkpoint, Name}, self()),
+ add(checkpoints, Name),
+ dbg_out("Checkpoint ~p (~p) started~n", [Name, self()]),
+ proc_lib:init_ack(Cp2#checkpoint_args.supervisor, {ok, self()}),
+ retainer_loop(Cp2)
+ end.
+
+prepare_tab(Cp, R) ->
+ Tab = R#retainer.tab_name,
+ prepare_tab(Cp, R, val({Tab, storage_type})).
+
+prepare_tab(Cp, R, Storage) ->
+ Tab = R#retainer.tab_name,
+ Name = R#retainer.cp_name,
+ case lists:member(node(), R#retainer.writers) of
+ true ->
+ R2 = retainer_create(Cp, R, Tab, Name, Storage),
+ set({Tab, {retainer, Name}}, R2),
+ add({Tab, checkpoints}, Name), %% Keep checkpoint info for table_info & mnesia_session
+ add_chkp_info(Tab, Name),
+ R2;
+ false ->
+ set({Tab, {retainer, Name}}, R#retainer{store = undefined}),
+ R
+ end.
+
+add_chkp_info(Tab, Name) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, OldList} | CommitList] ->
+ case lists:member(Name, OldList) of
+ true ->
+ ok;
+ false ->
+ NewC = [{checkpoints, [Name | OldList]} | CommitList],
+ mnesia_lib:set({Tab, commit_work}, NewC)
+ end;
+ CommitList ->
+ Chkp = {checkpoints, [Name]},
+            %% NOTE: checkpoints must come first in the list!
+ mnesia_lib:set({Tab, commit_work}, [Chkp | CommitList])
+ end.
+
+tab2retainer({Tab, Name}) ->
+ FlatName = lists:flatten(io_lib:write(Name)),
+ mnesia_lib:dir(lists:concat([?MODULE, "_", Tab, "_", FlatName, ".RET"])).
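+%% For a hypothetical table foo and a checkpoint named my_cp, tab2retainer/1
+%% yields a file named "mnesia_checkpoint_foo_my_cp.RET" in the Mnesia
+%% directory (an illustrative note, not part of the original code).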
+
+retainer_create(_Cp, R, Tab, Name, disc_only_copies) ->
+ Fname = tab2retainer({Tab, Name}),
+ file:delete(Fname),
+ Args = [{file, Fname}, {type, set}, {keypos, 2}, {repair, false}],
+ {ok, _} = mnesia_lib:dets_sync_open({Tab, Name}, Args),
+ dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ R#retainer{store = {dets, {Tab, Name}}, really_retain = true};
+retainer_create(Cp, R, Tab, Name, Storage) ->
+ T = ?ets_new_table(mnesia_retainer, [set, public, {keypos, 2}]),
+ Overriders = Cp#checkpoint_args.ram_overrides_dump,
+ ReallyR = R#retainer.really_retain,
+ ReallyCp = lists:member(Tab, Overriders),
+ ReallyR2 = prepare_ram_tab(Tab, T, Storage, ReallyR, ReallyCp),
+ dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ R#retainer{store = {ets, T}, really_retain = ReallyR2}.
+
+%% Copy the dumped table into the retainer if needed.
+%% If the really_retain flag has already been set to false,
+%% it should remain false even if we change storage type
+%% while the checkpoint is activated.
+prepare_ram_tab(Tab, T, ram_copies, true, false) ->
+ Fname = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Fname) of
+ true ->
+ Log = mnesia_log:open_log(prepare_ram_tab,
+ mnesia_log:dcd_log_header(),
+ Fname, true,
+ mnesia_monitor:get_env(auto_repair),
+ read_only),
+ Add = fun(Rec) ->
+ Key = element(2, Rec),
+ Recs =
+ case ?ets_lookup(T, Key) of
+ [] -> [];
+ [{_, _, Old}] -> Old
+ end,
+ ?ets_insert(T, {Tab, Key, [Rec | Recs]}),
+ continue
+ end,
+ traverse_dcd(mnesia_log:chunk_log(Log, start), Log, Add),
+ mnesia_log:close_log(Log);
+ false ->
+ ok
+ end,
+ false;
+prepare_ram_tab(_, _, _, ReallyRetain, _) ->
+ ReallyRetain.
+
+traverse_dcd({Cont, [LogH | Rest]}, Log, Fun)
+ when record(LogH, log_header),
+ LogH#log_header.log_kind == dcd_log,
+ LogH#log_header.log_version >= "1.0" ->
+ traverse_dcd({Cont, Rest}, Log, Fun); %% BUGBUG Error handling repaired files
+traverse_dcd({Cont, Recs}, Log, Fun) -> %% trashed data??
+ lists:foreach(Fun, Recs),
+ traverse_dcd(mnesia_log:chunk_log(Log, Cont), Log, Fun);
+traverse_dcd(eof, _Log, _Fun) ->
+ ok.
+
+retainer_get({ets, Store}, Key) -> ?ets_lookup(Store, Key);
+retainer_get({dets, Store}, Key) -> dets:lookup(Store, Key).
+
+retainer_put({ets, Store}, Val) -> ?ets_insert(Store, Val);
+retainer_put({dets, Store}, Val) -> dets:insert(Store, Val).
+
+retainer_first({ets, Store}) -> ?ets_first(Store);
+retainer_first({dets, Store}) -> dets:first(Store).
+
+retainer_next({ets, Store}, Key) -> ?ets_next(Store, Key);
+retainer_next({dets, Store}, Key) -> dets:next(Store, Key).
+
+%% retainer_next_slot(Tab, Pos) ->
+%% case retainer_slot(Tab, Pos) of
+%% '$end_of_table' ->
+%% '$end_of_table';
+%% [] ->
+%% retainer_next_slot(Tab, Pos + 1);
+%% Recs when list(Recs) ->
+%% {Pos, Recs}
+%% end.
+%%
+%% retainer_slot({ets, Store}, Pos) -> ?ets_next(Store, Pos);
+%% retainer_slot({dets, Store}, Pos) -> dets:slot(Store, Pos).
+
+retainer_fixtable(Tab, Bool) when atom(Tab) ->
+ mnesia_lib:db_fixtable(val({Tab, storage_type}), Tab, Bool);
+retainer_fixtable({ets, Tab}, Bool) ->
+ mnesia_lib:db_fixtable(ram_copies, Tab, Bool);
+retainer_fixtable({dets, Tab}, Bool) ->
+ mnesia_lib:db_fixtable(disc_only_copies, Tab, Bool).
+
+retainer_delete({ets, Store}) ->
+ ?ets_delete_table(Store);
+retainer_delete({dets, Store}) ->
+ mnesia_lib:dets_sync_close(Store),
+ Fname = tab2retainer(Store),
+ file:delete(Fname).
+
+retainer_loop(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ receive
+ {_From, {retain, Tid, Tab, Key, OldRecs}}
+ when Cp#checkpoint_args.wait_for_old == [] ->
+ R = val({Tab, {retainer, Name}}),
+ case R#retainer.really_retain of
+ true ->
+ PendingTab = Cp#checkpoint_args.pending_tab,
+ case catch ?ets_lookup_element(PendingTab, Tid, 1) of
+ {'EXIT', _} ->
+ Store = R#retainer.store,
+ case retainer_get(Store, Key) of
+ [] ->
+ retainer_put(Store, {Tab, Key, OldRecs});
+ _ ->
+ already_retained
+ end;
+ pending ->
+ ignore
+ end;
+ false ->
+ ignore
+ end,
+ retainer_loop(Cp);
+
+ %% Adm
+ {From, deactivate} ->
+ do_stop(Cp),
+ reply(From, Name, deactivated),
+ unlink(From),
+ exit(shutdown);
+
+ {'EXIT', Parent, _} when Parent == Cp#checkpoint_args.supervisor ->
+ %% do_stop(Cp),
+ %% assume that entire Mnesia is terminating
+ exit(shutdown);
+
+ {_From, {mnesia_down, Node}} ->
+ Cp2 = do_del_retainers(Cp, Node),
+ retainer_loop(Cp2);
+ {From, get_checkpoint} ->
+ reply(From, Name, Cp),
+ retainer_loop(Cp);
+ {From, {add_copy, Tab, Node}} when Cp#checkpoint_args.wait_for_old == [] ->
+ {Res, Cp2} = do_add_copy(Cp, Tab, Node),
+ reply(From, Name, Res),
+ retainer_loop(Cp2);
+ {From, {del_copy, Tab, Node}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = do_del_copy(Cp, Tab, Node),
+ reply(From, Name, ok),
+ retainer_loop(Cp2);
+ {From, {change_copy, Tab, From, To}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = do_change_copy(Cp, Tab, From, To),
+ reply(From, Name, ok),
+ retainer_loop(Cp2);
+ {_From, {add_retainer, R, Node}} ->
+ Cp2 = do_add_retainer(Cp, R, Node),
+ retainer_loop(Cp2);
+ {_From, {del_retainer, R, Node}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = do_del_retainer(Cp, R, Node),
+ retainer_loop(Cp2);
+
+ %% Iteration
+ {From, {iter_begin, Iter}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = iter_begin(Cp, From, Iter),
+ retainer_loop(Cp2);
+
+ {From, {iter_end, Iter}} when Cp#checkpoint_args.wait_for_old == [] ->
+ retainer_fixtable(Iter#iter.oid_tab, false),
+ Iters = Cp#checkpoint_args.iterators -- [Iter],
+ reply(From, Name, ok),
+ retainer_loop(Cp#checkpoint_args{iterators = Iters});
+
+ {_From, {exit_pending, Tid}}
+ when list(Cp#checkpoint_args.wait_for_old) ->
+ StillPending = lists:delete(Tid, Cp#checkpoint_args.wait_for_old),
+ Cp2 = Cp#checkpoint_args{wait_for_old = StillPending},
+ Cp3 = maybe_activate(Cp2),
+ retainer_loop(Cp3);
+
+ {From, collect_pending} ->
+ PendingTab = Cp#checkpoint_args.pending_tab,
+ del(pending_checkpoints, PendingTab),
+ Pending = ?ets_match_object(PendingTab, '_'),
+ reply(From, Name, {ok, Pending}),
+ retainer_loop(Cp);
+
+ {From, {activate, Pending}} ->
+ StillPending = mnesia_recover:still_pending(Pending),
+ enter_still_pending(StillPending, Cp#checkpoint_args.pending_tab),
+ Cp2 = maybe_activate(Cp#checkpoint_args{wait_for_old = StillPending}),
+ reply(From, Name, activated),
+ retainer_loop(Cp2);
+
+ {'EXIT', From, _Reason} ->
+ Iters = [Iter || Iter <- Cp#checkpoint_args.iterators,
+ check_iter(From, Iter)],
+ retainer_loop(Cp#checkpoint_args{iterators = Iters});
+
+ {system, From, Msg} ->
+ dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ sys:handle_system_msg(Msg, From, no_parent, ?MODULE, [], Cp)
+ end.
+
+maybe_activate(Cp)
+ when Cp#checkpoint_args.wait_for_old == [],
+ Cp#checkpoint_args.is_activated == false ->
+ Cp#checkpoint_args{pending_tab = undefined, is_activated = true};
+maybe_activate(Cp) ->
+ Cp.
+
+iter_begin(Cp, From, Iter) ->
+ Name = Cp#checkpoint_args.name,
+ R = val({Iter#iter.tab_name, {retainer, Name}}),
+ Iter2 = init_tabs(R, Iter),
+ Iter3 = Iter2#iter{pid = From},
+ retainer_fixtable(Iter3#iter.oid_tab, true),
+ Iters = [Iter3 | Cp#checkpoint_args.iterators],
+ reply(From, Name, {ok, Iter3, self()}),
+ Cp#checkpoint_args{iterators = Iters}.
+
+do_stop(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ del(pending_checkpoints, Cp#checkpoint_args.pending_tab),
+ del(pending_checkpoint_pids, self()),
+ del(checkpoints, Name),
+ unset({checkpoint, Name}),
+ lists:foreach(fun deactivate_tab/1, Cp#checkpoint_args.retainers),
+ Iters = Cp#checkpoint_args.iterators,
+ lists:foreach(fun(I) -> retainer_fixtable(I#iter.oid_tab, false) end, Iters).
+
+deactivate_tab(R) ->
+ Name = R#retainer.cp_name,
+ Tab = R#retainer.tab_name,
+    del({Tab, checkpoints}, Name), %% Drop the checkpoint info kept for table_info & mnesia_session
+ del_chkp_info(Tab, Name),
+ unset({Tab, {retainer, Name}}),
+ Active = lists:member(node(), R#retainer.writers),
+ case R#retainer.store of
+ undefined ->
+ ignore;
+ Store when Active == true ->
+ retainer_delete(Store);
+ _ ->
+ ignore
+ end.
+
+del_chkp_info(Tab, Name) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, ChkList} | Rest] ->
+ case lists:delete(Name, ChkList) of
+ [] ->
+ %% The only checkpoint was deleted
+ mnesia_lib:set({Tab, commit_work}, Rest);
+ NewList ->
+ mnesia_lib:set({Tab, commit_work},
+ [{checkpoints, NewList} | Rest])
+ end;
+ _ -> ignore
+ end.
+
+do_del_retainers(Cp, Node) ->
+ Rs = [do_del_retainer2(Cp, R, Node) || R <- Cp#checkpoint_args.retainers],
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+do_del_retainer2(Cp, R, Node) ->
+ Writers = R#retainer.writers -- [Node],
+ R2 = R#retainer{writers = Writers},
+ set({R2#retainer.tab_name, {retainer, R2#retainer.cp_name}}, R2),
+ if
+ Writers == [] ->
+ Event = {mnesia_checkpoint_deactivated, Cp#checkpoint_args.name},
+ mnesia_lib:report_system_event(Event),
+ do_stop(Cp),
+ exit(shutdown);
+ Node == node() ->
+ deactivate_tab(R), % Avoids unnecessary tm_retain accesses
+ set({R2#retainer.tab_name, {retainer, R2#retainer.cp_name}}, R2),
+ R2;
+ true ->
+ R2
+ end.
+
+do_del_retainer(Cp, R0, Node) ->
+ {R, Rest} = find_retainer(R0, Cp#checkpoint_args.retainers, []),
+ R2 = do_del_retainer2(Cp, R, Node),
+ Rs = [R2|Rest],
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+do_del_copy(Cp, Tab, ThisNode) when ThisNode == node() ->
+ Name = Cp#checkpoint_args.name,
+ Others = Cp#checkpoint_args.nodes -- [ThisNode],
+ R = val({Tab, {retainer, Name}}),
+ abcast(Others, Name, {del_retainer, R, ThisNode}),
+ do_del_retainer(Cp, R, ThisNode).
+
+do_add_copy(Cp, Tab, Node) when Node /= node()->
+ case lists:member(Tab, Cp#checkpoint_args.max) of
+ false ->
+ {ok, Cp};
+ true ->
+ Name = Cp#checkpoint_args.name,
+ R0 = val({Tab, {retainer, Name}}),
+ W = R0#retainer.writers,
+ R = R0#retainer{writers = W ++ [Node]},
+
+ case lists:member(Node, Cp#checkpoint_args.nodes) of
+ true ->
+ send_retainer(Cp, R, Node);
+ false ->
+ case tm_remote_prepare(Node, Cp) of
+ {ok, Name, _IgnoreNew, Node} ->
+ case lists:member(schema, Cp#checkpoint_args.max) of
+ true ->
+ %% We need to send schema retainer somewhere
+ RS0 = val({schema, {retainer, Name}}),
+ W = RS0#retainer.writers,
+ RS1 = RS0#retainer{writers = W ++ [Node]},
+ case send_retainer(Cp, RS1, Node) of
+ {ok, Cp1} ->
+ send_retainer(Cp1, R, Node);
+ Error ->
+ Error
+ end;
+ false ->
+ send_retainer(Cp, R, Node)
+ end;
+ {badrpc, Reason} ->
+ {{error, {badrpc, Reason}}, Cp};
+ {error, Reason} ->
+ {{error, Reason}, Cp}
+ end
+ end
+ end.
+
+tm_remote_prepare(Node, Cp) ->
+ rpc:call(Node, ?MODULE, tm_prepare, [Cp]).
+
+do_add_retainer(Cp, R0, Node) ->
+ Writers = R0#retainer.writers,
+ {R, Rest} = find_retainer(R0, Cp#checkpoint_args.retainers, []),
+ NewRet =
+ if
+ Node == node() ->
+ prepare_tab(Cp, R#retainer{writers = Writers});
+ true ->
+ R#retainer{writers = Writers}
+ end,
+ Rs = [NewRet | Rest],
+ set({NewRet#retainer.tab_name, {retainer, NewRet#retainer.cp_name}}, NewRet),
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+find_retainer(#retainer{cp_name = CP, tab_name = Tab},
+ [Ret = #retainer{cp_name = CP, tab_name = Tab} | R], Acc) ->
+ {Ret, R ++ Acc};
+find_retainer(Ret, [H|R], Acc) ->
+ find_retainer(Ret, R, [H|Acc]).
+
+send_retainer(Cp, R, Node) ->
+ Name = Cp#checkpoint_args.name,
+ Nodes0 = Cp#checkpoint_args.nodes -- [Node],
+ Nodes1 = Nodes0 ++ [Node],
+ Nodes = Nodes1 -- [node()],
+ abcast(Nodes, Name, {add_retainer, R, Node}),
+ Store = R#retainer.store,
+%% send_retainer2(Node, Name, Store, retainer_next_slot(Store, 0)),
+ send_retainer2(Node, Name, Store, retainer_first(Store)),
+ Cp2 = do_add_retainer(Cp, R, Node),
+ {ok, Cp2}.
+
+send_retainer2(_, _, _, '$end_of_table') ->
+ ok;
+%%send_retainer2(Node, Name, Store, {Slot, Records}) ->
+send_retainer2(Node, Name, Store, Key) ->
+ [{Tab, _, Records}] = retainer_get(Store, Key),
+ abcast([Node], Name, {retain, {dirty, send_retainer}, Tab, Key, Records}),
+ send_retainer2(Node, Name, Store, retainer_next(Store, Key)).
+
+do_change_copy(Cp, Tab, FromType, ToType) ->
+ Name = Cp#checkpoint_args.name,
+ R = val({Tab, {retainer, Name}}),
+ R2 = prepare_tab(Cp, R, ToType),
+ {_, Old} = R#retainer.store,
+ {_, New} = R2#retainer.store,
+
+ Fname = tab2retainer({Tab, Name}),
+ if
+ FromType == disc_only_copies ->
+ mnesia_lib:dets_sync_close(Old),
+ loaded = mnesia_lib:dets_to_ets(Old, New, Fname, set, no, yes),
+ ok = file:delete(Fname);
+ ToType == disc_only_copies ->
+ TabSize = ?ets_info(Old, size),
+ Props = [{file, Fname},
+ {type, set},
+ {keypos, 2},
+%% {ram_file, true},
+ {estimated_no_objects, TabSize + 256},
+ {repair, false}],
+ {ok, _} = mnesia_lib:dets_sync_open(New, Props),
+ ok = mnesia_dumper:raw_dump_table(New, Old),
+ ?ets_delete_table(Old);
+ true ->
+ ignore
+ end,
+ Pos = #retainer.tab_name,
+ Rs = lists:keyreplace(Tab, Pos, Cp#checkpoint_args.retainers, R2),
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+check_iter(From, Iter) when Iter#iter.pid == From ->
+ retainer_fixtable(Iter#iter.oid_tab, false),
+ false;
+check_iter(_From, _Iter) ->
+ true.
+
+init_tabs(R, Iter) ->
+ {Kind, _} = Store = R#retainer.store,
+ Main = {Kind, Iter#iter.tab_name},
+ Ret = Store,
+ Iter2 = Iter#iter{main_tab = Main, retainer_tab = Ret},
+ case Iter#iter.source of
+ table -> Iter2#iter{oid_tab = Main};
+ retainer -> Iter2#iter{oid_tab = Ret}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Iteration
+%%
+%% Iterates over a table and applies Fun(ListOfRecords)
+%% with a suitable number of records, e.g. 1000 or so.
+%% ListOfRecords is [] when the iteration is over.
+%%
+%% OidKind determines which internal table is iterated over, and
+%% ValKind determines which table the actual records are picked from.
+%% Legal values for OidKind and ValKind are the atoms table and
+%% retainer.
+%%
+%% The iteration may either be performed over the main table (which
+%% contains the latest values of the records, i.e. the values that
+%% are visible to the applications) or over the checkpoint retainer
+%% (which contains the values as they looked at the point in time when
+%% the checkpoint was activated).
+%%
+%% It is possible to iterate over the main table and pick values
+%% from the retainer and vice versa.
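+%%
+%% Illustrative sketch (not part of the original code): folding over the
+%% checkpoint view of a hypothetical table foo under a checkpoint named
+%% my_cp, counting the items handed to Fun.
+%%
+%%   Count = fun(Items, Acc) -> Acc + length(Items) end,
+%%   {ok, N} = mnesia_checkpoint:iterate(my_cp, foo, Count, 0,
+%%                                       retainer, checkpoint).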
+
+iterate(Name, Tab, Fun, Acc, Source, Val) ->
+ Iter0 = #iter{tab_name = Tab, source = Source, val = Val},
+ case call(Name, {iter_begin, Iter0}) of
+ {error, Reason} ->
+ {error, Reason};
+ {ok, Iter, Pid} ->
+ link(Pid), % We don't want any pending fixtable's
+ Res = (catch iter(Fun, Acc, Iter)),
+ unlink(Pid),
+ call(Name, {iter_end, Iter}),
+ case Res of
+ {'EXIT', Reason} -> {error, Reason};
+ {error, Reason} -> {error, Reason};
+ Acc2 -> {ok, Acc2}
+ end
+ end.
+
+iter(Fun, Acc, Iter)->
+ iter(Fun, Acc, Iter, retainer_first(Iter#iter.oid_tab)).
+
+iter(Fun, Acc, Iter, Key) ->
+ case get_records(Iter, Key) of
+ {'$end_of_table', []} ->
+ Fun([], Acc);
+ {'$end_of_table', Records} ->
+ Acc2 = Fun(Records, Acc),
+ Fun([], Acc2);
+ {Next, Records} ->
+ Acc2 = Fun(Records, Acc),
+ iter(Fun, Acc2, Iter, Next)
+ end.
+
+stop_iteration(Reason) ->
+ throw({error, {stopped, Reason}}).
+
+get_records(Iter, Key) ->
+ get_records(Iter, Key, 500, []). % 500 keys
+
+get_records(_Iter, Key, 0, Acc) ->
+ {Key, lists:append(lists:reverse(Acc))};
+get_records(_Iter, '$end_of_table', _I, Acc) ->
+ {'$end_of_table', lists:append(lists:reverse(Acc))};
+get_records(Iter, Key, I, Acc) ->
+ Recs = get_val(Iter, Key),
+ Next = retainer_next(Iter#iter.oid_tab, Key),
+ get_records(Iter, Next, I-1, [Recs | Acc]).
+
+get_val(Iter, Key) when Iter#iter.val == latest ->
+ get_latest_val(Iter, Key);
+get_val(Iter, Key) when Iter#iter.val == checkpoint ->
+ get_checkpoint_val(Iter, Key).
+
+get_latest_val(Iter, Key) when Iter#iter.source == table ->
+ retainer_get(Iter#iter.main_tab, Key);
+get_latest_val(Iter, Key) when Iter#iter.source == retainer ->
+ DeleteOid = {Iter#iter.tab_name, Key},
+ [DeleteOid | retainer_get(Iter#iter.main_tab, Key)].
+
+get_checkpoint_val(Iter, Key) when Iter#iter.source == table ->
+ retainer_get(Iter#iter.main_tab, Key);
+get_checkpoint_val(Iter, Key) when Iter#iter.source == retainer ->
+ DeleteOid = {Iter#iter.tab_name, Key},
+ case retainer_get(Iter#iter.retainer_tab, Key) of
+ [{_, _, []}] -> [DeleteOid];
+ [{_, _, Records}] -> [DeleteOid | Records]
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, Cp) ->
+ retainer_loop(Cp).
+
+system_terminate(_Reason, _Parent,_Debug, Cp) ->
+ do_stop(Cp).
+
+system_code_change(Cp, _Module, _OldVsn, _Extra) ->
+ {ok, Cp}.
+
+convert_cp_record(Cp) when record(Cp, checkpoint) ->
+ ROD =
+ case Cp#checkpoint.ram_overrides_dump of
+ true -> Cp#checkpoint.min ++ Cp#checkpoint.max;
+ false -> []
+ end,
+
+ {ok, #checkpoint_args{name = Cp#checkpoint.name,
+ allow_remote = Cp#checkpoint.name,
+ ram_overrides_dump = ROD,
+ nodes = Cp#checkpoint.nodes,
+ node = Cp#checkpoint.node,
+ now = Cp#checkpoint.now,
+ cookie = ?unique_cookie,
+ min = Cp#checkpoint.min,
+ max = Cp#checkpoint.max,
+ pending_tab = Cp#checkpoint.pending_tab,
+ wait_for_old = Cp#checkpoint.wait_for_old,
+ is_activated = Cp#checkpoint.is_activated,
+ ignore_new = Cp#checkpoint.ignore_new,
+ retainers = Cp#checkpoint.retainers,
+ iterators = Cp#checkpoint.iterators,
+ supervisor = Cp#checkpoint.supervisor,
+ pid = Cp#checkpoint.pid
+ }};
+convert_cp_record(Cp) when record(Cp, checkpoint_args) ->
+ AllTabs = Cp#checkpoint_args.min ++ Cp#checkpoint_args.max,
+ ROD = case Cp#checkpoint_args.ram_overrides_dump of
+ [] ->
+ false;
+ AllTabs ->
+ true;
+ _ ->
+ error
+ end,
+ if
+ ROD == error ->
+ {error, {"Old node cannot handle new checkpoint protocol",
+ ram_overrides_dump}};
+ true ->
+ {ok, #checkpoint{name = Cp#checkpoint_args.name,
+ allow_remote = Cp#checkpoint_args.name,
+ ram_overrides_dump = ROD,
+ nodes = Cp#checkpoint_args.nodes,
+ node = Cp#checkpoint_args.node,
+ now = Cp#checkpoint_args.now,
+ min = Cp#checkpoint_args.min,
+ max = Cp#checkpoint_args.max,
+ pending_tab = Cp#checkpoint_args.pending_tab,
+ wait_for_old = Cp#checkpoint_args.wait_for_old,
+ is_activated = Cp#checkpoint_args.is_activated,
+ ignore_new = Cp#checkpoint_args.ignore_new,
+ retainers = Cp#checkpoint_args.retainers,
+ iterators = Cp#checkpoint_args.iterators,
+ supervisor = Cp#checkpoint_args.supervisor,
+ pid = Cp#checkpoint_args.pid
+ }}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl
new file mode 100644
index 0000000000..29e31f15a6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl
@@ -0,0 +1,39 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_checkpoint_sup.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_checkpoint_sup).
+
+-behaviour(supervisor).
+
+-export([start/0, init/1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% top supervisor callback functions
+
+start() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sub supervisor callback functions
+
+init([]) ->
+ Flags = {simple_one_for_one, 0, timer:hours(24)}, % Trust the top supervisor
+ MFA = {mnesia_checkpoint, start, []},
+ Modules = [?MODULE, mnesia_checkpoint, supervisor],
+ KillAfter = mnesia_kernel_sup:supervisor_timeout(timer:seconds(3)),
+ Workers = [{?MODULE, MFA, transient, KillAfter, worker, Modules}],
+ {ok, {Flags, Workers}}.
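+
+%% Editorial note (not part of the original module): with the
+%% simple_one_for_one strategy above, children are started dynamically,
+%% roughly as
+%%   supervisor:start_child(mnesia_checkpoint_sup, ExtraArgs)
+%% where ExtraArgs is appended to the {mnesia_checkpoint, start, []} MFA.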
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_controller.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_controller.erl
new file mode 100644
index 0000000000..b6f865f0d4
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_controller.erl
@@ -0,0 +1,2012 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_controller.erl,v 1.3 2010/03/04 13:54:19 maria Exp $
+%%
+%% The mnesia_init process loads tables from local disc or from
+%% other nodes. It also coordinates updates of the info about
+%% where we can read and write tables.
+%%
+%% Tables may need to be loaded initially at startup of the local
+%% node or when other nodes announce that they already have loaded
+%% tables that we also want.
+%%
+%% Initially we set the load request queue to those tables that we
+%% can safely load locally, i.e. tables where we have the last
+%% consistent replica and we have received mnesia_down from all
+%% other nodes holding the table. Then we let the mnesia_init
+%% process enter its normal working state.
+%%
+%% When we need to load a table we append a request to the load
+%% request queue. All other requests are regarded as high priority
+%% and are processed immediately (e.g. update table whereabouts).
+%% We process the load request queue as a "background" job.
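+%%
+%% Editorial sketch (not part of the original module): a low priority
+%% load request ends up in the loader_queue via add_worker/2 and is
+%% started later by opt_start_worker/1, roughly as in
+%%
+%%   handle_call({net_load, Tab, Cs}, From, State) ->
+%%       Worker = #net_load{table = Tab, opt_reply_to = From,
+%%                          reason = add_table_copy, cstruct = Cs},
+%%       noreply(add_worker(Worker, State));
+%%
+%% whereas e.g. a where_to_write update is handled immediately in its
+%% own handle_call clause.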
+
+-module(mnesia_controller).
+
+-behaviour(gen_server).
+
+%% Mnesia internal stuff
+-export([
+ start/0,
+ i_have_tab/1,
+ info/0,
+ get_info/1,
+ get_workers/1,
+ force_load_table/1,
+ async_dump_log/1,
+ sync_dump_log/1,
+ connect_nodes/1,
+ wait_for_schema_commit_lock/0,
+ release_schema_commit_lock/0,
+ create_table/1,
+ get_disc_copy/1,
+ get_cstructs/0,
+ sync_and_block_table_whereabouts/4,
+ sync_del_table_copy_whereabouts/2,
+ block_table/1,
+ unblock_table/1,
+ block_controller/0,
+ unblock_controller/0,
+ unannounce_add_table_copy/2,
+ master_nodes_updated/2,
+ mnesia_down/1,
+ add_active_replica/2,
+ add_active_replica/3,
+ add_active_replica/4,
+ change_table_access_mode/1,
+ del_active_replica/2,
+ wait_for_tables/2,
+ get_network_copy/2,
+ merge_schema/0,
+ start_remote_sender/4,
+ schedule_late_disc_load/2
+ ]).
+
+%% gen_server callbacks
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+%% Module internal stuff
+-export([call/1,
+ cast/1,
+ dump_and_reply/2,
+ load_and_reply/2,
+ send_and_reply/2,
+ wait_for_tables_init/2
+ ]).
+
+-import(mnesia_lib, [set/2, add/2]).
+-import(mnesia_lib, [fatal/2, error/2, verbose/2, dbg_out/2]).
+
+-include("mnesia.hrl").
+
+-define(SERVER_NAME, ?MODULE).
+
+-record(state, {supervisor,
+ schema_is_merged = false,
+ early_msgs = [],
+ loader_pid,
+ loader_queue = [],
+ sender_pid,
+ sender_queue = [],
+ late_loader_queue = [],
+ dumper_pid, % Dumper or schema commit pid
+ dumper_queue = [], % Dumper or schema commit queue
+ dump_log_timer_ref,
+ is_stopping = false
+ }).
+
+-record(worker_reply, {what,
+ pid,
+ result
+ }).
+
+-record(schema_commit_lock, {owner}).
+-record(block_controller, {owner}).
+
+-record(dump_log, {initiated_by,
+ opt_reply_to
+ }).
+
+-record(net_load, {table,
+ reason,
+ opt_reply_to,
+ cstruct = unknown
+ }).
+
+-record(send_table, {table,
+ receiver_pid,
+ remote_storage
+ }).
+
+-record(disc_load, {table,
+ reason,
+ opt_reply_to
+ }).
+
+-record(late_load, {table,
+ reason,
+ opt_reply_to,
+ loaders
+ }).
+
+-record(loader_done, {worker_pid,
+ is_loaded,
+ table_name,
+ needs_announce,
+ needs_sync,
+ needs_reply,
+ reply_to,
+ reply}).
+
+-record(sender_done, {worker_pid,
+ worker_res,
+ table_name
+ }).
+
+-record(dumper_done, {worker_pid,
+ worker_res
+ }).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+start() ->
+ gen_server:start_link({local, ?SERVER_NAME}, ?MODULE, [self()],
+ [{timeout, infinity}
+ %% ,{debug, [trace]}
+ ]).
+
+sync_dump_log(InitBy) ->
+ call({sync_dump_log, InitBy}).
+
+async_dump_log(InitBy) ->
+ ?SERVER_NAME ! {async_dump_log, InitBy}.
+
+%% Wait for tables to be active
+%% If needed, we will wait for Mnesia to start
+%% If Mnesia stops, we will wait for Mnesia to restart
+%% We will wait even if the list of tables is empty
+%%
+wait_for_tables(Tabs, Timeout) when list(Tabs), Timeout == infinity ->
+ do_wait_for_tables(Tabs, Timeout);
+wait_for_tables(Tabs, Timeout) when list(Tabs),
+ integer(Timeout), Timeout >= 0 ->
+ do_wait_for_tables(Tabs, Timeout);
+wait_for_tables(Tabs, Timeout) ->
+ {error, {badarg, Tabs, Timeout}}.
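+
+%% Editorial example (not part of the original module): the possible
+%% results are ok, {timeout, BadTabs} and {error, Reason}, so a caller
+%% might match on them like this (foo and bar are placeholder tables):
+%%
+%%   case mnesia_controller:wait_for_tables([foo, bar], 5000) of
+%%       ok                 -> ready;
+%%       {timeout, BadTabs} -> {still_loading, BadTabs};
+%%       {error, Reason}    -> {error, Reason}
+%%   end.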
+
+do_wait_for_tables(Tabs, 0) ->
+ reply_wait(Tabs);
+do_wait_for_tables(Tabs, Timeout) ->
+ Pid = spawn_link(?MODULE, wait_for_tables_init, [self(), Tabs]),
+ receive
+ {?SERVER_NAME, Pid, Res} ->
+ Res;
+
+ {'EXIT', Pid, _} ->
+ reply_wait(Tabs)
+
+ after Timeout ->
+ unlink(Pid),
+ exit(Pid, timeout),
+ reply_wait(Tabs)
+ end.
+
+reply_wait(Tabs) ->
+ case catch mnesia_lib:active_tables() of
+ {'EXIT', _} ->
+ {error, {node_not_running, node()}};
+ Active when list(Active) ->
+ case Tabs -- Active of
+ [] ->
+ ok;
+ BadTabs ->
+ {timeout, BadTabs}
+ end
+ end.
+
+wait_for_tables_init(From, Tabs) ->
+ process_flag(trap_exit, true),
+ Res = wait_for_init(From, Tabs, whereis(?SERVER_NAME)),
+ From ! {?SERVER_NAME, self(), Res},
+ unlink(From),
+ exit(normal).
+
+wait_for_init(From, Tabs, Init) ->
+ case catch link(Init) of
+ {'EXIT', _} ->
+ %% Mnesia is not started
+ {error, {node_not_running, node()}};
+ true when pid(Init) ->
+ cast({sync_tabs, Tabs, self()}),
+ rec_tabs(Tabs, Tabs, From, Init)
+ end.
+
+sync_reply(Waiter, Tab) ->
+ Waiter ! {?SERVER_NAME, {tab_synced, Tab}}.
+
+rec_tabs([Tab | Tabs], AllTabs, From, Init) ->
+ receive
+ {?SERVER_NAME, {tab_synced, Tab}} ->
+ rec_tabs(Tabs, AllTabs, From, Init);
+
+ {'EXIT', From, _} ->
+ %% This will trigger an exit signal
+ %% to mnesia_init
+ exit(wait_for_tables_timeout);
+
+ {'EXIT', Init, _} ->
+ %% Oops, mnesia_init stopped,
+ exit(mnesia_stopped)
+ end;
+rec_tabs([], _, _, Init) ->
+ unlink(Init),
+ ok.
+
+get_cstructs() ->
+ call(get_cstructs).
+
+mnesia_down(Node) ->
+ case cast({mnesia_down, Node}) of
+ {error, _} -> mnesia_monitor:mnesia_down(?SERVER_NAME, Node);
+ _Pid -> ok
+ end.
+wait_for_schema_commit_lock() ->
+ link(whereis(?SERVER_NAME)),
+ unsafe_call(wait_for_schema_commit_lock).
+
+block_controller() ->
+ call(block_controller).
+
+unblock_controller() ->
+ cast(unblock_controller).
+
+release_schema_commit_lock() ->
+ cast({release_schema_commit_lock, self()}),
+ unlink(whereis(?SERVER_NAME)).
+
+%% Special for preparation of add table copy
+get_network_copy(Tab, Cs) ->
+ Work = #net_load{table = Tab,
+ reason = {dumper, add_table_copy},
+ cstruct = Cs
+ },
+ Res = (catch load_table(Work)),
+ if Res#loader_done.is_loaded == true ->
+ Tab = Res#loader_done.table_name,
+ case Res#loader_done.needs_announce of
+ true ->
+ i_have_tab(Tab);
+ false ->
+ ignore
+ end;
+ true -> ignore
+ end,
+
+ receive %% Flush copier done message
+ {copier_done, _Node} ->
+ ok
+ after 500 -> %% avoid hanging if something is wrong and we shall fail.
+ ignore
+ end,
+ Res#loader_done.reply.
+
+%% This function is invoked from the dumper
+%%
+%% There are two cases here:
+%% startup ->
+%% no need for sync, since mnesia_controller not started yet
+%% schema_trans ->
+%% already synced with mnesia_controller since the dumper
+%%	is synchronously started from mnesia_controller
+
+create_table(Tab) ->
+ {loaded, ok} = mnesia_loader:disc_load_table(Tab, {dumper,create_table}).
+
+get_disc_copy(Tab) ->
+ disc_load_table(Tab, {dumper,change_table_copy_type}, undefined).
+
+%% Returns ok instead of yes
+force_load_table(Tab) when atom(Tab), Tab /= schema ->
+ case ?catch_val({Tab, storage_type}) of
+ ram_copies ->
+ do_force_load_table(Tab);
+ disc_copies ->
+ do_force_load_table(Tab);
+ disc_only_copies ->
+ do_force_load_table(Tab);
+ unknown ->
+ set({Tab, load_by_force}, true),
+ cast({force_load_updated, Tab}),
+ wait_for_tables([Tab], infinity);
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}}
+ end;
+force_load_table(Tab) ->
+ {error, {bad_type, Tab}}.
+
+do_force_load_table(Tab) ->
+ Loaded = ?catch_val({Tab, load_reason}),
+ case Loaded of
+ unknown ->
+ set({Tab, load_by_force}, true),
+ mnesia_late_loader:async_late_disc_load(node(), [Tab], forced_by_user),
+ wait_for_tables([Tab], infinity);
+ {'EXIT', _} ->
+ set({Tab, load_by_force}, true),
+ mnesia_late_loader:async_late_disc_load(node(), [Tab], forced_by_user),
+ wait_for_tables([Tab], infinity);
+ _ ->
+ ok
+ end.
+master_nodes_updated(schema, _Masters) ->
+ ignore;
+master_nodes_updated(Tab, Masters) ->
+ cast({master_nodes_updated, Tab, Masters}).
+
+schedule_late_disc_load(Tabs, Reason) ->
+ MsgTag = late_disc_load,
+ try_schedule_late_disc_load(Tabs, Reason, MsgTag).
+
+try_schedule_late_disc_load(Tabs, _Reason, MsgTag)
+ when Tabs == [], MsgTag /= schema_is_merged ->
+ ignore;
+try_schedule_late_disc_load(Tabs, Reason, MsgTag) ->
+ GetIntents =
+ fun() ->
+ Item = mnesia_late_disc_load,
+ Nodes = val({current, db_nodes}),
+ mnesia:lock({global, Item, Nodes}, write),
+ case multicall(Nodes -- [node()], disc_load_intents) of
+ {Replies, []} ->
+ call({MsgTag, Tabs, Reason, Replies}),
+ done;
+ {_, BadNodes} ->
+			%% Some nodes did not respond, let's try again
+ {retry, BadNodes}
+ end
+ end,
+ case mnesia:transaction(GetIntents) of
+ {'atomic', done} ->
+ done;
+ {'atomic', {retry, BadNodes}} ->
+ verbose("Retry late_load_tables because bad nodes: ~p~n",
+ [BadNodes]),
+ try_schedule_late_disc_load(Tabs, Reason, MsgTag);
+ {aborted, AbortReason} ->
+ fatal("Cannot late_load_tables~p: ~p~n",
+ [[Tabs, Reason, MsgTag], AbortReason])
+ end.
+
+connect_nodes(Ns) ->
+ case mnesia:system_info(is_running) of
+ no ->
+ {error, {node_not_running, node()}};
+ yes ->
+ {NewC, OldC} = mnesia_recover:connect_nodes(Ns),
+ Connected = NewC ++OldC,
+ New1 = mnesia_lib:intersect(Ns, Connected),
+ New = New1 -- val({current, db_nodes}),
+
+ case try_merge_schema(New) of
+ ok ->
+ mnesia_lib:add_list(extra_db_nodes, New),
+ {ok, New};
+ {aborted, {throw, Str}} when list(Str) ->
+ %%mnesia_recover:disconnect_nodes(New),
+ {error, {merge_schema_failed, lists:flatten(Str)}};
+ Else ->
+ %% Unconnect nodes where merge failed!!
+ %% mnesia_recover:disconnect_nodes(New),
+ {error, Else}
+ end
+ end.
+
+%% Merge the local schema with the schema on other nodes.
+%% But first we must let all processes that want to force
+%% load tables wait until the schema merge is done.
+
+merge_schema() ->
+ AllNodes = mnesia_lib:all_nodes(),
+ case try_merge_schema(AllNodes) of
+ ok ->
+ schema_is_merged();
+ {aborted, {throw, Str}} when list(Str) ->
+ fatal("Failed to merge schema: ~s~n", [Str]);
+ Else ->
+ fatal("Failed to merge schema: ~p~n", [Else])
+ end.
+
+try_merge_schema(Nodes) ->
+ case mnesia_schema:merge_schema() of
+ {'atomic', not_merged} ->
+ %% No more nodes that we need to merge the schema with
+ ok;
+ {'atomic', {merged, OldFriends, NewFriends}} ->
+	    %% Check if new nodes have been added to the schema
+ Diff = mnesia_lib:all_nodes() -- [node() | Nodes],
+ mnesia_recover:connect_nodes(Diff),
+
+ %% Tell everybody to adopt orphan tables
+ im_running(OldFriends, NewFriends),
+ im_running(NewFriends, OldFriends),
+
+ try_merge_schema(Nodes);
+ {'atomic', {"Cannot get cstructs", Node, Reason}} ->
+ dbg_out("Cannot get cstructs, Node ~p ~p~n", [Node, Reason]),
+	    timer:sleep(1000), % Avoid an endless-loop lookalike
+ try_merge_schema(Nodes);
+ Other ->
+ Other
+ end.
+
+im_running(OldFriends, NewFriends) ->
+ abcast(OldFriends, {im_running, node(), NewFriends}).
+
+schema_is_merged() ->
+ MsgTag = schema_is_merged,
+ SafeLoads = initial_safe_loads(),
+
+    %% At this point we do not know anything about
+    %% which tables the other nodes have already
+    %% loaded, and therefore we let the normal
+    %% processing of the loader_queue take care
+    %% of it, since by that point we will
+    %% know the whereabouts. We rely on the fact
+    %% that all nodes tell each other directly
+    %% when they have loaded a table and are
+    %% willing to share it.
+
+ try_schedule_late_disc_load(SafeLoads, initial, MsgTag).
+
+
+cast(Msg) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->{error, {node_not_running, node()}};
+ Pid -> gen_server:cast(Pid, Msg)
+ end.
+
+abcast(Nodes, Msg) ->
+ gen_server:abcast(Nodes, ?SERVER_NAME, Msg).
+
+unsafe_call(Msg) ->
+ case whereis(?SERVER_NAME) of
+ undefined -> {error, {node_not_running, node()}};
+ Pid -> gen_server:call(Pid, Msg, infinity)
+ end.
+
+call(Msg) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ link(Pid),
+ Res = gen_server:call(Pid, Msg, infinity),
+ unlink(Pid),
+
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+remote_call(Node, Func, Args) ->
+ case catch gen_server:call({?MODULE, Node}, {Func, Args, self()}, infinity) of
+ {'EXIT', Error} ->
+ {error, Error};
+ Else ->
+ Else
+ end.
+
+multicall(Nodes, Msg) ->
+ {Good, Bad} = gen_server:multi_call(Nodes, ?MODULE, Msg, infinity),
+ PatchedGood = [Reply || {_Node, Reply} <- Good],
+ {PatchedGood, Bad}. %% Make the replies look like rpc:multicalls..
+%% rpc:multicall(Nodes, ?MODULE, call, [Msg]).
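+
+%% Editorial note (not part of the original module): for the
+%% disc_load_intents request each good node replies {ok, Node, Tabs},
+%% so a result may look like (node names are placeholders)
+%%   {[{ok, n1@host, [t1]}, {ok, n2@host, []}], [n3@host]}
+%% which is the shape try_schedule_late_disc_load/3 matches on.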
+
+%%%----------------------------------------------------------------------
+%%% Callback functions from gen_server
+%%%----------------------------------------------------------------------
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ mnesia_lib:verbose("~p starting: ~p~n", [?SERVER_NAME, self()]),
+
+ %% Handshake and initialize transaction recovery
+ %% for new nodes detected in the schema
+ All = mnesia_lib:all_nodes(),
+ Diff = All -- [node() | val(original_nodes)],
+ mnesia_lib:unset(original_nodes),
+ mnesia_recover:connect_nodes(Diff),
+
+ Interval = mnesia_monitor:get_env(dump_log_time_threshold),
+ Msg = {async_dump_log, time_threshold},
+ {ok, Ref} = timer:send_interval(Interval, Msg),
+ mnesia_dumper:start_regulator(),
+
+ {ok, #state{supervisor = Parent, dump_log_timer_ref = Ref}}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%% {stop, Reason, Reply, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_call({sync_dump_log, InitBy}, From, State) ->
+ Worker = #dump_log{initiated_by = InitBy,
+ opt_reply_to = From
+ },
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_call(wait_for_schema_commit_lock, From, State) ->
+ Worker = #schema_commit_lock{owner = From},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_call(block_controller, From, State) ->
+ Worker = #block_controller{owner = From},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+
+handle_call(get_cstructs, From, State) ->
+ Tabs = val({schema, tables}),
+ Cstructs = [val({T, cstruct}) || T <- Tabs],
+ Running = val({current, db_nodes}),
+ reply(From, {cstructs, Cstructs, Running}),
+ noreply(State);
+
+handle_call({schema_is_merged, TabsR, Reason, RemoteLoaders}, From, State) ->
+ State2 = late_disc_load(TabsR, Reason, RemoteLoaders, From, State),
+
+ %% Handle early messages
+ Msgs = State2#state.early_msgs,
+ State3 = State2#state{early_msgs = [], schema_is_merged = true},
+ Ns = val({current, db_nodes}),
+ dbg_out("Schema is merged ~w, State ~w~n", [Ns, State3]),
+%% dbg_out("handle_early_msgs ~p ~n", [Msgs]), % qqqq
+ handle_early_msgs(lists:reverse(Msgs), State3);
+
+handle_call(disc_load_intents, From, State) ->
+ Tabs = disc_load_intents(State#state.loader_queue) ++
+ disc_load_intents(State#state.late_loader_queue),
+ ActiveTabs = mnesia_lib:local_active_tables(),
+ reply(From, {ok, node(), mnesia_lib:union(Tabs, ActiveTabs)}),
+ noreply(State);
+
+handle_call({update_where_to_write, [add, Tab, AddNode], _From}, _Dummy, State) ->
+%%% dbg_out("update_w2w ~p", [[add, Tab, AddNode]]), %%% qqqq
+ Current = val({current, db_nodes}),
+ Res =
+ case lists:member(AddNode, Current) and
+ State#state.schema_is_merged == true of
+ true ->
+ mnesia_lib:add({Tab, where_to_write}, AddNode);
+ false ->
+ ignore
+ end,
+ {reply, Res, State};
+
+handle_call({add_active_replica, [Tab, ToNode, RemoteS, AccessMode], From},
+ ReplyTo, State) ->
+ KnownNode = lists:member(ToNode, val({current, db_nodes})),
+ Merged = State#state.schema_is_merged,
+ if
+ KnownNode == false ->
+ reply(ReplyTo, ignore),
+ noreply(State);
+ Merged == true ->
+ Res = add_active_replica(Tab, ToNode, RemoteS, AccessMode),
+ reply(ReplyTo, Res),
+ noreply(State);
+ true -> %% Schema is not merged
+ Msg = {add_active_replica, [Tab, ToNode, RemoteS, AccessMode], From},
+ Msgs = State#state.early_msgs,
+ reply(ReplyTo, ignore), %% Reply ignore and add data after schema merge
+ noreply(State#state{early_msgs = [{call, Msg, undefined} | Msgs]})
+ end;
+
+handle_call({unannounce_add_table_copy, [Tab, Node], From}, ReplyTo, State) ->
+ KnownNode = lists:member(node(From), val({current, db_nodes})),
+ Merged = State#state.schema_is_merged,
+ if
+ KnownNode == false ->
+ reply(ReplyTo, ignore),
+ noreply(State);
+ Merged == true ->
+ Res = unannounce_add_table_copy(Tab, Node),
+ reply(ReplyTo, Res),
+ noreply(State);
+ true -> %% Schema is not merged
+ Msg = {unannounce_add_table_copy, [Tab, Node], From},
+ Msgs = State#state.early_msgs,
+ reply(ReplyTo, ignore), %% Reply ignore and add data after schema merge
+	    %% Set ReplyTo to undefined so we don't reply twice
+ noreply(State#state{early_msgs = [{call, Msg, undefined} | Msgs]})
+ end;
+
+handle_call(Msg, From, State) when State#state.schema_is_merged == false ->
+ %% Buffer early messages
+%% dbg_out("Buffered early msg ~p ~n", [Msg]), %% qqqq
+ Msgs = State#state.early_msgs,
+ noreply(State#state{early_msgs = [{call, Msg, From} | Msgs]});
+
+handle_call({net_load, Tab, Cs}, From, State) ->
+ Worker = #net_load{table = Tab,
+ opt_reply_to = From,
+ reason = add_table_copy,
+ cstruct = Cs
+ },
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_call({late_disc_load, Tabs, Reason, RemoteLoaders}, From, State) ->
+ State2 = late_disc_load(Tabs, Reason, RemoteLoaders, From, State),
+ noreply(State2);
+
+handle_call({block_table, [Tab], From}, _Dummy, State) ->
+ case lists:member(node(From), val({current, db_nodes})) of
+ true ->
+ block_table(Tab);
+ false ->
+ ignore
+ end,
+ {reply, ok, State};
+
+handle_call({check_w2r, _Node, Tab}, _From, State) ->
+ {reply, val({Tab, where_to_read}), State};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?SERVER_NAME, Msg]),
+ noreply(State).
+
+disc_load_intents([H | T]) when record(H, disc_load) ->
+ [H#disc_load.table | disc_load_intents(T)];
+disc_load_intents([H | T]) when record(H, late_load) ->
+ [H#late_load.table | disc_load_intents(T)];
+disc_load_intents([H | T]) when record(H, net_load) ->
+ disc_load_intents(T);
+disc_load_intents([]) ->
+ [].
+
+late_disc_load(TabsR, Reason, RemoteLoaders, From, State) ->
+ verbose("Intend to load tables: ~p~n", [TabsR]),
+ ?eval_debug_fun({?MODULE, late_disc_load},
+ [{tabs, TabsR},
+ {reason, Reason},
+ {loaders, RemoteLoaders}]),
+
+ reply(From, queued),
+ %% RemoteLoaders is a list of {ok, Node, Tabs} tuples
+
+ %% Remove deleted tabs
+ LocalTabs = mnesia_lib:val({schema, local_tables}),
+ Filter = fun({Tab, Reas}, Acc) ->
+ case lists:member(Tab, LocalTabs) of
+ true -> [{Tab, Reas} | Acc];
+ false -> Acc
+ end;
+ (Tab, Acc) ->
+ case lists:member(Tab, LocalTabs) of
+ true -> [Tab | Acc];
+ false -> Acc
+ end
+ end,
+
+ Tabs = lists:foldl(Filter, [], TabsR),
+
+ Nodes = val({current, db_nodes}),
+ LateLoaders = late_loaders(Tabs, Reason, RemoteLoaders, Nodes),
+ LateQueue = State#state.late_loader_queue ++ LateLoaders,
+ State#state{late_loader_queue = LateQueue}.
+
+late_loaders([{Tab, Reason} | Tabs], DefaultReason, RemoteLoaders, Nodes) ->
+ LoadNodes = late_load_filter(RemoteLoaders, Tab, Nodes, []),
+ case LoadNodes of
+ [] ->
+ cast({disc_load, Tab, Reason}); % Ugly cast
+ _ ->
+ ignore
+ end,
+ LateLoad = #late_load{table = Tab, loaders = LoadNodes, reason = Reason},
+ [LateLoad | late_loaders(Tabs, DefaultReason, RemoteLoaders, Nodes)];
+
+late_loaders([Tab | Tabs], Reason, RemoteLoaders, Nodes) ->
+ Loaders = late_load_filter(RemoteLoaders, Tab, Nodes, []),
+ case Loaders of
+ [] ->
+ cast({disc_load, Tab, Reason}); % Ugly cast
+ _ ->
+ ignore
+ end,
+ LateLoad = #late_load{table = Tab, loaders = Loaders, reason = Reason},
+ [LateLoad | late_loaders(Tabs, Reason, RemoteLoaders, Nodes)];
+late_loaders([], _Reason, _RemoteLoaders, _Nodes) ->
+ [].
+
+late_load_filter([{error, _} | RemoteLoaders], Tab, Nodes, Acc) ->
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc);
+late_load_filter([{badrpc, _} | RemoteLoaders], Tab, Nodes, Acc) ->
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc);
+late_load_filter([RL | RemoteLoaders], Tab, Nodes, Acc) ->
+ {ok, Node, Intents} = RL,
+ Access = val({Tab, access_mode}),
+ LocalC = val({Tab, local_content}),
+ StillActive = lists:member(Node, Nodes),
+ RemoteIntent = lists:member(Tab, Intents),
+ if
+ Access == read_write,
+ LocalC == false,
+ StillActive == true,
+ RemoteIntent == true ->
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ case lists:member(Node, Masters) of
+ true ->
+ %% The other node is master node for
+ %% the table, accept his load intent
+ late_load_filter(RemoteLoaders, Tab, Nodes, [Node | Acc]);
+ false when Masters == [] ->
+ %% The table has no master nodes
+ %% accept his load intent
+ late_load_filter(RemoteLoaders, Tab, Nodes, [Node | Acc]);
+ false ->
+		    %% Someone else is master node for
+ %% the table, ignore his load intent
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc)
+ end;
+ true ->
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc)
+ end;
+late_load_filter([], _Tab, _Nodes, Acc) ->
+ Acc.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_cast({release_schema_commit_lock, _Owner}, State) ->
+ if
+ State#state.is_stopping == true ->
+ {stop, shutdown, State};
+ true ->
+ case State#state.dumper_queue of
+ [#schema_commit_lock{}|Rest] ->
+ [_Worker | Rest] = State#state.dumper_queue,
+ State2 = State#state{dumper_pid = undefined,
+ dumper_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ _ ->
+ noreply(State)
+ end
+ end;
+
+handle_cast(unblock_controller, State) ->
+ if
+ State#state.is_stopping == true ->
+ {stop, shutdown, State};
+ record(hd(State#state.dumper_queue), block_controller) ->
+ [_Worker | Rest] = State#state.dumper_queue,
+ State2 = State#state{dumper_pid = undefined,
+ dumper_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3)
+ end;
+
+handle_cast({mnesia_down, Node}, State) ->
+ maybe_log_mnesia_down(Node),
+ mnesia_lib:del({current, db_nodes}, Node),
+ mnesia_checkpoint:tm_mnesia_down(Node),
+ Alltabs = val({schema, tables}),
+ State2 = reconfigure_tables(Node, State, Alltabs),
+ case State#state.sender_pid of
+ undefined -> ignore;
+ Pid when pid(Pid) -> Pid ! {copier_done, Node}
+ end,
+ case State#state.loader_pid of
+ undefined -> ignore;
+ Pid2 when pid(Pid2) -> Pid2 ! {copier_done, Node}
+ end,
+ NewSenders =
+ case State#state.sender_queue of
+ [OldSender | RestSenders] ->
+ Remove = fun(ST) ->
+ node(ST#send_table.receiver_pid) /= Node
+ end,
+ NewS = lists:filter(Remove, RestSenders),
+		%% Keep the old sender; it will be removed by sender_done
+ [OldSender | NewS];
+ [] ->
+ []
+ end,
+ Early = remove_early_messages(State2#state.early_msgs, Node),
+ mnesia_monitor:mnesia_down(?SERVER_NAME, Node),
+ noreply(State2#state{sender_queue = NewSenders, early_msgs = Early});
+
+handle_cast({im_running, _Node, NewFriends}, State) ->
+ Tabs = mnesia_lib:local_active_tables() -- [schema],
+ Ns = mnesia_lib:intersect(NewFriends, val({current, db_nodes})),
+ abcast(Ns, {adopt_orphans, node(), Tabs}),
+ noreply(State);
+
+handle_cast(Msg, State) when State#state.schema_is_merged == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ noreply(State#state{early_msgs = [{cast, Msg} | Msgs]});
+
+handle_cast({disc_load, Tab, Reason}, State) ->
+ Worker = #disc_load{table = Tab, reason = Reason},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_cast(Worker, State) when record(Worker, send_table) ->
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_cast({sync_tabs, Tabs, From}, State) ->
+ %% user initiated wait_for_tables
+ handle_sync_tabs(Tabs, From),
+ noreply(State);
+
+handle_cast({i_have_tab, Tab, Node}, State) ->
+ case lists:member(Node, val({current, db_nodes})) of
+ true ->
+ State2 = node_has_tabs([Tab], Node, State),
+ noreply(State2);
+ false ->
+ noreply(State)
+ end;
+
+handle_cast({force_load_updated, Tab}, State) ->
+ case val({Tab, active_replicas}) of
+ [] ->
+ %% No valid replicas
+ noreply(State);
+ [SomeNode | _] ->
+ State2 = node_has_tabs([Tab], SomeNode, State),
+ noreply(State2)
+ end;
+
+handle_cast({master_nodes_updated, Tab, Masters}, State) ->
+ Active = val({Tab, active_replicas}),
+ Valid =
+ case val({Tab, load_by_force}) of
+ true ->
+ Active;
+ false ->
+ if
+ Masters == [] ->
+ Active;
+ true ->
+ mnesia_lib:intersect(Masters, Active)
+ end
+ end,
+ case Valid of
+ [] ->
+ %% No valid replicas
+ noreply(State);
+ [SomeNode | _] ->
+ State2 = node_has_tabs([Tab], SomeNode, State),
+ noreply(State2)
+ end;
+
+handle_cast({adopt_orphans, Node, Tabs}, State) ->
+
+ State2 = node_has_tabs(Tabs, Node, State),
+
+ %% Register the other node as up and running
+ mnesia_recover:log_mnesia_up(Node),
+ verbose("Logging mnesia_up ~w~n", [Node]),
+ mnesia_lib:report_system_event({mnesia_up, Node}),
+
+ %% Load orphan tables
+ LocalTabs = val({schema, local_tables}) -- [schema],
+ Nodes = val({current, db_nodes}),
+ {LocalOrphans, RemoteMasters} =
+ orphan_tables(LocalTabs, Node, Nodes, [], []),
+ Reason = {adopt_orphan, node()},
+ mnesia_late_loader:async_late_disc_load(node(), LocalOrphans, Reason),
+
+ Fun =
+ fun(N) ->
+ RemoteOrphans =
+ [Tab || {Tab, Ns} <- RemoteMasters,
+ lists:member(N, Ns)],
+ mnesia_late_loader:maybe_async_late_disc_load(N, RemoteOrphans, Reason)
+ end,
+ lists:foreach(Fun, Nodes),
+
+ Queue = State2#state.loader_queue,
+ State3 = State2#state{loader_queue = Queue},
+ noreply(State3);
+
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?SERVER_NAME, Msg]),
+ noreply(State).
+
+handle_sync_tabs([Tab | Tabs], From) ->
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ case get({sync_tab, Tab}) of
+ undefined ->
+ put({sync_tab, Tab}, [From]);
+ Pids ->
+ put({sync_tab, Tab}, [From | Pids])
+ end;
+ _ ->
+ sync_reply(From, Tab)
+ end,
+ handle_sync_tabs(Tabs, From);
+handle_sync_tabs([], _From) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_info({async_dump_log, InitBy}, State) ->
+ Worker = #dump_log{initiated_by = InitBy},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_info(Done, State) when record(Done, dumper_done) ->
+ Pid = Done#dumper_done.worker_pid,
+ Res = Done#dumper_done.worker_res,
+ if
+ State#state.is_stopping == true ->
+ {stop, shutdown, State};
+ Res == dumped, Pid == State#state.dumper_pid ->
+ [Worker | Rest] = State#state.dumper_queue,
+ reply(Worker#dump_log.opt_reply_to, Res),
+ State2 = State#state{dumper_pid = undefined,
+ dumper_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ true ->
+ fatal("Dumper failed: ~p~n state: ~p~n", [Res, State]),
+ {stop, fatal, State}
+ end;
+
+handle_info(Done, State) when record(Done, loader_done) ->
+ if
+ %% Assertion
+ Done#loader_done.worker_pid == State#state.loader_pid -> ok
+ end,
+
+ [_Worker | Rest] = LoadQ0 = State#state.loader_queue,
+ LateQueue0 = State#state.late_loader_queue,
+ {LoadQ, LateQueue} =
+ case Done#loader_done.is_loaded of
+ true ->
+ Tab = Done#loader_done.table_name,
+
+ %% Optional user sync
+ case Done#loader_done.needs_sync of
+ true -> user_sync_tab(Tab);
+ false -> ignore
+ end,
+
+ %% Optional table announcement
+ case Done#loader_done.needs_announce of
+ true ->
+ i_have_tab(Tab),
+ case Tab of
+ schema ->
+ ignore;
+ _ ->
+ %% Local node needs to perform user_sync_tab/1
+ Ns = val({current, db_nodes}),
+ abcast(Ns, {i_have_tab, Tab, node()})
+ end;
+ false ->
+ case Tab of
+ schema ->
+ ignore;
+ _ ->
+ %% Local node needs to perform user_sync_tab/1
+ Ns = val({current, db_nodes}),
+ AlreadyKnows = val({Tab, active_replicas}),
+ abcast(Ns -- AlreadyKnows, {i_have_tab, Tab, node()})
+ end
+ end,
+
+ %% Optional client reply
+ case Done#loader_done.needs_reply of
+ true ->
+ reply(Done#loader_done.reply_to,
+ Done#loader_done.reply);
+ false ->
+ ignore
+ end,
+ {Rest, reply_late_load(Tab, LateQueue0)};
+ false ->
+ case Done#loader_done.reply of
+ restart ->
+ {LoadQ0, LateQueue0};
+ _ ->
+ {Rest, LateQueue0}
+ end
+ end,
+
+ State2 = State#state{loader_pid = undefined,
+ loader_queue = LoadQ,
+ late_loader_queue = LateQueue},
+
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+
+handle_info(Done, State) when record(Done, sender_done) ->
+ Pid = Done#sender_done.worker_pid,
+ Res = Done#sender_done.worker_res,
+ if
+ Res == ok, Pid == State#state.sender_pid ->
+ [Worker | Rest] = State#state.sender_queue,
+ Worker#send_table.receiver_pid ! {copier_done, node()},
+ State2 = State#state{sender_pid = undefined,
+ sender_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ true ->
+ %% No need to send any message to the table receiver
+ %% since it will soon get a mnesia_down anyway
+ fatal("Sender failed: ~p~n state: ~p~n", [Res, State]),
+ {stop, fatal, State}
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
+ catch set(mnesia_status, stopping),
+ case State#state.dumper_pid of
+ undefined ->
+ dbg_out("~p was ~p~n", [?SERVER_NAME, R]),
+ {stop, shutdown, State};
+ _ ->
+ noreply(State#state{is_stopping = true})
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.dumper_pid ->
+ case State#state.dumper_queue of
+ [#schema_commit_lock{}|Workers] -> %% Schema trans crashed or was killed
+ State2 = State#state{dumper_queue = Workers, dumper_pid = undefined},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ _Other ->
+ fatal("Dumper or schema commit crashed: ~p~n state: ~p~n", [R, State]),
+ {stop, fatal, State}
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.loader_pid ->
+ fatal("Loader crashed: ~p~n state: ~p~n", [R, State]),
+ {stop, fatal, State};
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.sender_pid ->
+ %% No need to send any message to the table receiver
+ %% since it will soon get a mnesia_down anyway
+ fatal("Sender crashed: ~p~n state: ~p~n", [R, State]),
+ {stop, fatal, State};
+
+handle_info({From, get_state}, State) ->
+ From ! {?SERVER_NAME, State},
+ noreply(State);
+
+%% No real need for buffering
+handle_info(Msg, State) when State#state.schema_is_merged == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ noreply(State#state{early_msgs = [{info, Msg} | Msgs]});
+
+handle_info({'EXIT', Pid, wait_for_tables_timeout}, State) ->
+ sync_tab_timeout(Pid, get()),
+ noreply(State);
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info: ~p~n", [?SERVER_NAME, Msg]),
+ noreply(State).
+
+reply_late_load(Tab, [H | T]) when H#late_load.table == Tab ->
+ reply(H#late_load.opt_reply_to, ok),
+ reply_late_load(Tab, T);
+reply_late_load(Tab, [H | T]) ->
+ [H | reply_late_load(Tab, T)];
+reply_late_load(_Tab, []) ->
+ [].
+
+sync_tab_timeout(Pid, [{{sync_tab, Tab}, Pids} | Tail]) ->
+ case lists:delete(Pid, Pids) of
+ [] ->
+ erase({sync_tab, Tab});
+ Pids2 ->
+ put({sync_tab, Tab}, Pids2)
+ end,
+ sync_tab_timeout(Pid, Tail);
+sync_tab_timeout(Pid, [_ | Tail]) ->
+ sync_tab_timeout(Pid, Tail);
+sync_tab_timeout(_Pid, []) ->
+ ok.
+
+%% Pick the load record that has the highest load order
+%% Returns {BestLoad, RemainingQueue} or {none, []} if queue is empty
+pick_next(Queue) ->
+ pick_next(Queue, none, none, []).
+
+pick_next([Head | Tail], Load, Order, Rest) when record(Head, net_load) ->
+ Tab = Head#net_load.table,
+ select_best(Head, Tail, val({Tab, load_order}), Load, Order, Rest);
+pick_next([Head | Tail], Load, Order, Rest) when record(Head, disc_load) ->
+ Tab = Head#disc_load.table,
+ select_best(Head, Tail, val({Tab, load_order}), Load, Order, Rest);
+pick_next([], Load, _Order, Rest) ->
+ {Load, Rest}.
+
+select_best(Load, Tail, Order, none, none, Rest) ->
+ pick_next(Tail, Load, Order, Rest);
+select_best(Load, Tail, Order, OldLoad, OldOrder, Rest) when Order > OldOrder ->
+ pick_next(Tail, Load, Order, [OldLoad | Rest]);
+select_best(Load, Tail, _Order, OldLoad, OldOrder, Rest) ->
+ pick_next(Tail, OldLoad, OldOrder, [Load | Rest]).
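+
+%% Editorial example (not part of the original module): assuming load
+%% orders a = 0, b = 2 and c = 1, the queue
+%%   [#net_load{table = a}, #disc_load{table = b}, #net_load{table = c}]
+%% yields
+%%   {#disc_load{table = b}, [#net_load{table = c}, #net_load{table = a}]}
+%% i.e. the record with the highest load order is picked and the order
+%% of the remaining queue is not preserved.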
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+terminate(Reason, State) ->
+ mnesia_monitor:terminate_proc(?SERVER_NAME, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+maybe_log_mnesia_down(N) ->
+ %% We use mnesia_down when deciding which tables to load locally,
+    %% so if we are not running (i.e. haven't decided which tables
+ %% to load locally), don't log mnesia_down yet.
+ case mnesia_lib:is_running() of
+ yes ->
+ verbose("Logging mnesia_down ~w~n", [N]),
+ mnesia_recover:log_mnesia_down(N),
+ ok;
+ _ ->
+ Filter = fun(Tab) ->
+ inactive_copy_holders(Tab, N)
+ end,
+ HalfLoadedTabs = lists:any(Filter, val({schema, local_tables}) -- [schema]),
+ if
+ HalfLoadedTabs == true ->
+ verbose("Logging mnesia_down ~w~n", [N]),
+ mnesia_recover:log_mnesia_down(N),
+ ok;
+ true ->
+ %% Unfortunately we have not loaded some common
+ %% tables yet, so we cannot rely on the nodedown
+ log_later %% BUGBUG handle this case!!!
+ end
+ end.
+
+inactive_copy_holders(Tab, Node) ->
+ Cs = val({Tab, cstruct}),
+ case mnesia_lib:cs_to_storage_type(Node, Cs) of
+ unknown ->
+ false;
+ _Storage ->
+ mnesia_lib:not_active_here(Tab)
+ end.
+
+orphan_tables([Tab | Tabs], Node, Ns, Local, Remote) ->
+ Cs = val({Tab, cstruct}),
+ CopyHolders = mnesia_lib:copy_holders(Cs),
+ RamCopyHolders = Cs#cstruct.ram_copies,
+ DiscCopyHolders = CopyHolders -- RamCopyHolders,
+ DiscNodes = val({schema, disc_copies}),
+ LocalContent = Cs#cstruct.local_content,
+ RamCopyHoldersOnDiscNodes = mnesia_lib:intersect(RamCopyHolders, DiscNodes),
+ Active = val({Tab, active_replicas}),
+ case lists:member(Node, DiscCopyHolders) of
+ true when Active == [] ->
+ case DiscCopyHolders -- Ns of
+ [] ->
+ %% We're last up and the other nodes have not
+ %% loaded the table. Lets load it if we are
+		    %% loaded the table. Let's load it if we are
+ case lists:min(DiscCopyHolders) of
+ Min when Min == node() ->
+ case mnesia_recover:get_master_nodes(Tab) of
+ [] ->
+ L = [Tab | Local],
+ orphan_tables(Tabs, Node, Ns, L, Remote);
+ Masters ->
+ R = [{Tab, Masters} | Remote],
+ orphan_tables(Tabs, Node, Ns, Local, R)
+ end;
+ _ ->
+ orphan_tables(Tabs, Node, Ns, Local, Remote)
+ end;
+ _ ->
+ orphan_tables(Tabs, Node, Ns, Local, Remote)
+ end;
+ false when Active == [], DiscCopyHolders == [], RamCopyHoldersOnDiscNodes == [] ->
+	    %% Special case when all replicas reside on disc-less nodes
+ orphan_tables(Tabs, Node, Ns, [Tab | Local], Remote);
+ _ when LocalContent == true ->
+ orphan_tables(Tabs, Node, Ns, [Tab | Local], Remote);
+ _ ->
+ orphan_tables(Tabs, Node, Ns, Local, Remote)
+ end;
+orphan_tables([], _, _, LocalOrphans, RemoteMasters) ->
+ {LocalOrphans, RemoteMasters}.
+
+node_has_tabs([Tab | Tabs], Node, State) when Node /= node() ->
+ State2 = update_whereabouts(Tab, Node, State),
+ node_has_tabs(Tabs, Node, State2);
+node_has_tabs([Tab | Tabs], Node, State) ->
+ user_sync_tab(Tab),
+ node_has_tabs(Tabs, Node, State);
+node_has_tabs([], _Node, State) ->
+ State.
+
+update_whereabouts(Tab, Node, State) ->
+ Storage = val({Tab, storage_type}),
+ Read = val({Tab, where_to_read}),
+ LocalC = val({Tab, local_content}),
+ BeingCreated = (?catch_val({Tab, create_table}) == true),
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ ByForce = val({Tab, load_by_force}),
+ GoGetIt =
+ if
+ ByForce == true ->
+ true;
+ Masters == [] ->
+ true;
+ true ->
+ lists:member(Node, Masters)
+ end,
+
+ dbg_out("Table ~w is loaded on ~w. s=~w, r=~w, lc=~w, f=~w, m=~w~n",
+ [Tab, Node, Storage, Read, LocalC, ByForce, GoGetIt]),
+ if
+ LocalC == true ->
+ %% Local contents, don't care about other node
+ State;
+ Storage == unknown, Read == nowhere ->
+ %% No own copy, time to read remotely
+ %% if the other node is a good node
+ add_active_replica(Tab, Node),
+ case GoGetIt of
+ true ->
+ set({Tab, where_to_read}, Node),
+ user_sync_tab(Tab),
+ State;
+ false ->
+ State
+ end;
+ Storage == unknown ->
+ %% No own copy, continue to read remotely
+ add_active_replica(Tab, Node),
+ NodeST = mnesia_lib:storage_type_at_node(Node, Tab),
+ ReadST = mnesia_lib:storage_type_at_node(Read, Tab),
+ if %% Avoid reading from disc_only_copies
+ NodeST == disc_only_copies ->
+ ignore;
+ ReadST == disc_only_copies ->
+ mnesia_lib:set_remote_where_to_read(Tab);
+ true ->
+ ignore
+ end,
+ user_sync_tab(Tab),
+ State;
+ BeingCreated == true ->
+ %% The table is currently being created
+ %% and we shall have an own copy of it.
+ %% We will load the (empty) table locally.
+ add_active_replica(Tab, Node),
+ State;
+ Read == nowhere ->
+ %% Own copy, go and get a copy of the table
+	    %% if the other node is a master or if there
+	    %% is no master at all
+ add_active_replica(Tab, Node),
+ case GoGetIt of
+ true ->
+ Worker = #net_load{table = Tab,
+ reason = {active_remote, Node}},
+ add_worker(Worker, State);
+ false ->
+ State
+ end;
+ true ->
+ %% We already have an own copy
+ add_active_replica(Tab, Node),
+ user_sync_tab(Tab),
+ State
+ end.
+
+initial_safe_loads() ->
+ case val({schema, storage_type}) of
+ ram_copies ->
+ Downs = [],
+ Tabs = val({schema, local_tables}) -- [schema],
+ LastC = fun(T) -> last_consistent_replica(T, Downs) end,
+ lists:zf(LastC, Tabs);
+
+ disc_copies ->
+ Downs = mnesia_recover:get_mnesia_downs(),
+ dbg_out("mnesia_downs = ~p~n", [Downs]),
+
+ Tabs = val({schema, local_tables}) -- [schema],
+ LastC = fun(T) -> last_consistent_replica(T, Downs) end,
+ lists:zf(LastC, Tabs)
+ end.
+
+last_consistent_replica(Tab, Downs) ->
+ Cs = val({Tab, cstruct}),
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ Ram = Cs#cstruct.ram_copies,
+ Disc = Cs#cstruct.disc_copies,
+ DiscOnly = Cs#cstruct.disc_only_copies,
+ BetterCopies0 = mnesia_lib:remote_copy_holders(Cs) -- Downs,
+ BetterCopies = BetterCopies0 -- Ram,
+ AccessMode = Cs#cstruct.access_mode,
+ Copies = mnesia_lib:copy_holders(Cs),
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ LocalMaster0 = lists:member(node(), Masters),
+ LocalContent = Cs#cstruct.local_content,
+ RemoteMaster =
+ if
+ Masters == [] -> false;
+ true -> not LocalMaster0
+ end,
+ LocalMaster =
+ if
+ Masters == [] -> false;
+ true -> LocalMaster0
+ end,
+ if
+ Copies == [node()] ->
+ %% Only one copy holder and it is local.
+ %% It may also be a local contents table
+ {true, {Tab, local_only}};
+ LocalContent == true ->
+ {true, {Tab, local_content}};
+ LocalMaster == true ->
+ %% We have a local master
+ {true, {Tab, local_master}};
+ RemoteMaster == true ->
+ %% Wait for remote master copy
+ false;
+ Storage == ram_copies ->
+ if
+ Disc == [], DiscOnly == [] ->
+		    %% Nobody has a copy on disc
+ {true, {Tab, ram_only}};
+ true ->
+		    %% Some other node has a copy on disc
+ false
+ end;
+ AccessMode == read_only ->
+ %% No one has been able to update the table,
+ %% i.e. all disc resident copies are equal
+ {true, {Tab, read_only}};
+ BetterCopies /= [], Masters /= [node()] ->
+ %% There are better copies on other nodes
+ %% and we do not have the only master copy
+ false;
+ true ->
+ {true, {Tab, initial}}
+ end.
+
+reconfigure_tables(N, State, [Tab |Tail]) ->
+ del_active_replica(Tab, N),
+ case val({Tab, where_to_read}) of
+ N -> mnesia_lib:set_remote_where_to_read(Tab);
+ _ -> ignore
+ end,
+ LateQ = drop_loaders(Tab, N, State#state.late_loader_queue),
+ reconfigure_tables(N, State#state{late_loader_queue = LateQ}, Tail);
+
+reconfigure_tables(_, State, []) ->
+ State.
+
+remove_early_messages([], _Node) ->
+ [];
+remove_early_messages([{call, {add_active_replica, [_, Node, _, _], _}, _}|R], Node) ->
+ remove_early_messages(R, Node); %% Does a reply before queuing
+remove_early_messages([{call, {block_table, _, From}, ReplyTo}|R], Node)
+ when node(From) == Node ->
+ reply(ReplyTo, ok), %% Remove gen:server waits..
+ remove_early_messages(R, Node);
+remove_early_messages([{cast, {i_have_tab, _Tab, Node}}|R], Node) ->
+ remove_early_messages(R, Node);
+remove_early_messages([{cast, {adopt_orphans, Node, _Tabs}}|R], Node) ->
+ remove_early_messages(R, Node);
+remove_early_messages([M|R],Node) ->
+ [M|remove_early_messages(R,Node)].
+
+%% Drop loader from late load queue and possibly trigger a disc_load
+drop_loaders(Tab, Node, [H | T]) when H#late_load.table == Tab ->
+ %% Check if it is time to issue a disc_load request
+ case H#late_load.loaders of
+ [Node] ->
+ Reason = {H#late_load.reason, last_loader_down, Node},
+ cast({disc_load, Tab, Reason}); % Ugly cast
+ _ ->
+ ignore
+ end,
+ %% Drop the node from the list of loaders
+ H2 = H#late_load{loaders = H#late_load.loaders -- [Node]},
+ [H2 | drop_loaders(Tab, Node, T)];
+drop_loaders(Tab, Node, [H | T]) ->
+ [H | drop_loaders(Tab, Node, T)];
+drop_loaders(_, _, []) ->
+ [].
+
+add_active_replica(Tab, Node) ->
+ add_active_replica(Tab, Node, val({Tab, cstruct})).
+
+add_active_replica(Tab, Node, Cs) when record(Cs, cstruct) ->
+ Storage = mnesia_lib:schema_cs_to_storage_type(Node, Cs),
+ AccessMode = Cs#cstruct.access_mode,
+ add_active_replica(Tab, Node, Storage, AccessMode).
+
+%% Block table primitives
+
+block_table(Tab) ->
+ Var = {Tab, where_to_commit},
+ Old = val(Var),
+ New = {blocked, Old},
+ set(Var, New). % where_to_commit
+
+unblock_table(Tab) ->
+ Var = {Tab, where_to_commit},
+ New =
+ case val(Var) of
+ {blocked, List} ->
+ List;
+ List ->
+ List
+ end,
+ set(Var, New). % where_to_commit
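+
+%% Editorial sketch (not part of the original module): blocking wraps
+%% the where_to_commit value, e.g.
+%%   [{n1@host, ram_copies}, {n2@host, disc_copies}]
+%% becomes
+%%   {blocked, [{n1@host, ram_copies}, {n2@host, disc_copies}]}
+%% and unblock_table/1 simply unwraps it again.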
+
+is_tab_blocked(W2C) when list(W2C) ->
+ {false, W2C};
+is_tab_blocked({blocked, W2C}) when list(W2C) ->
+ {true, W2C}.
+
+mark_blocked_tab(true, Value) ->
+ {blocked, Value};
+mark_blocked_tab(false, Value) ->
+ Value.
+
+%%
+
+add_active_replica(Tab, Node, Storage, AccessMode) ->
+ Var = {Tab, where_to_commit},
+ {Blocked, Old} = is_tab_blocked(val(Var)),
+ Del = lists:keydelete(Node, 1, Old),
+ case AccessMode of
+ read_write ->
+ New = lists:sort([{Node, Storage} | Del]),
+ set(Var, mark_blocked_tab(Blocked, New)), % where_to_commit
+ add({Tab, where_to_write}, Node);
+ read_only ->
+ set(Var, mark_blocked_tab(Blocked, Del)),
+ mnesia_lib:del({Tab, where_to_write}, Node)
+ end,
+ add({Tab, active_replicas}, Node).
+
+del_active_replica(Tab, Node) ->
+ Var = {Tab, where_to_commit},
+ {Blocked, Old} = is_tab_blocked(val(Var)),
+ Del = lists:keydelete(Node, 1, Old),
+ New = lists:sort(Del),
+ set(Var, mark_blocked_tab(Blocked, New)), % where_to_commit
+ mnesia_lib:del({Tab, active_replicas}, Node),
+ mnesia_lib:del({Tab, where_to_write}, Node).
+
+change_table_access_mode(Cs) ->
+ Tab = Cs#cstruct.name,
+ lists:foreach(fun(N) -> add_active_replica(Tab, N, Cs) end,
+ val({Tab, active_replicas})).
+
+%% node To now has tab loaded, but this must be undone
+%% This code is rpc:call'ed from the tab_copier process
+%% when it has *not* released its table lock
+unannounce_add_table_copy(Tab, To) ->
+ del_active_replica(Tab, To),
+ case val({Tab , where_to_read}) of
+ To ->
+ mnesia_lib:set_remote_where_to_read(Tab);
+ _ ->
+ ignore
+ end.
+
+user_sync_tab(Tab) ->
+ case val(debug) of
+ trace ->
+ mnesia_subscr:subscribe(whereis(mnesia_event), {table, Tab});
+ _ ->
+ ignore
+ end,
+
+ case erase({sync_tab, Tab}) of
+ undefined ->
+ ok;
+ Pids ->
+ lists:foreach(fun(Pid) -> sync_reply(Pid, Tab) end, Pids)
+ end.
+
+i_have_tab(Tab) ->
+ case val({Tab, local_content}) of
+ true ->
+ mnesia_lib:set_local_content_whereabouts(Tab);
+ false ->
+ set({Tab, where_to_read}, node())
+ end,
+ add_active_replica(Tab, node()).
+
+sync_and_block_table_whereabouts(Tab, ToNode, RemoteS, AccessMode) when Tab /= schema ->
+ Current = val({current, db_nodes}),
+ Ns =
+ case lists:member(ToNode, Current) of
+ true -> Current -- [ToNode];
+ false -> Current
+ end,
+ remote_call(ToNode, block_table, [Tab]),
+ [remote_call(Node, add_active_replica, [Tab, ToNode, RemoteS, AccessMode]) ||
+ Node <- [ToNode | Ns]],
+ ok.
+
+sync_del_table_copy_whereabouts(Tab, ToNode) when Tab /= schema ->
+ Current = val({current, db_nodes}),
+ Ns =
+ case lists:member(ToNode, Current) of
+ true -> Current;
+ false -> [ToNode | Current]
+ end,
+ Args = [Tab, ToNode],
+ [remote_call(Node, unannounce_add_table_copy, Args) || Node <- Ns],
+ ok.
+
+get_info(Timeout) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->
+ {timeout, Timeout};
+ Pid ->
+ Pid ! {self(), get_state},
+ receive
+ {?SERVER_NAME, State} when record(State, state) ->
+ {info,State}
+ after Timeout ->
+ {timeout, Timeout}
+ end
+ end.
+
+get_workers(Timeout) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->
+ {timeout, Timeout};
+ Pid ->
+ Pid ! {self(), get_state},
+ receive
+ {?SERVER_NAME, State} when record(State, state) ->
+ {workers, State#state.loader_pid, State#state.sender_pid, State#state.dumper_pid}
+ after Timeout ->
+ {timeout, Timeout}
+ end
+ end.
+
+info() ->
+ Tabs = mnesia_lib:local_active_tables(),
+ io:format( "---> Active tables <--- ~n", []),
+ info(Tabs).
+
+info([Tab | Tail]) ->
+ case val({Tab, storage_type}) of
+ disc_only_copies ->
+ info_format(Tab,
+ dets:info(Tab, size),
+ dets:info(Tab, file_size),
+ "bytes on disc");
+ _ ->
+ info_format(Tab,
+ ?ets_info(Tab, size),
+ ?ets_info(Tab, memory),
+ "words of mem")
+ end,
+ info(Tail);
+info([]) -> ok;
+info(Tab) -> info([Tab]).
+
+info_format(Tab, Size, Mem, Media) ->
+ StrT = mnesia_lib:pad_name(atom_to_list(Tab), 15, []),
+ StrS = mnesia_lib:pad_name(integer_to_list(Size), 8, []),
+ StrM = mnesia_lib:pad_name(integer_to_list(Mem), 8, []),
+ io:format("~s: with ~s records occupying ~s ~s~n",
+ [StrT, StrS, StrM, Media]).
+
+%% Handle early arrived messages
+handle_early_msgs([Msg | Msgs], State) ->
+ %% The messages are in reverse order
+ case handle_early_msg(Msg, State) of
+ {stop, Reason, Reply, State2} ->
+ {stop, Reason, Reply, State2};
+ {stop, Reason, State2} ->
+ {stop, Reason, State2};
+ {noreply, State2} ->
+ handle_early_msgs(Msgs, State2);
+ {noreply, State2, _Timeout} ->
+ handle_early_msgs(Msgs, State2);
+ Else ->
+ dbg_out("handle_early_msgs case clause ~p ~n", [Else]),
+ erlang:error(Else, [[Msg | Msgs], State])
+ end;
+handle_early_msgs([], State) ->
+ noreply(State).
+
+handle_early_msg({call, Msg, From}, State) ->
+ handle_call(Msg, From, State);
+handle_early_msg({cast, Msg}, State) ->
+ handle_cast(Msg, State);
+handle_early_msg({info, Msg}, State) ->
+ handle_info(Msg, State).
+
+noreply(State) ->
+ {noreply, State}.
+
+reply(undefined, Reply) ->
+ Reply;
+reply(ReplyTo, Reply) ->
+ gen_server:reply(ReplyTo, Reply),
+ Reply.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Worker management
+
+%% Returns new State
+add_worker(Worker, State) when record(Worker, dump_log) ->
+ InitBy = Worker#dump_log.initiated_by,
+ Queue = State#state.dumper_queue,
+ case lists:keymember(InitBy, #dump_log.initiated_by, Queue) of
+ false ->
+ ignore;
+ true when Worker#dump_log.opt_reply_to == undefined ->
+ %% The same threshold has been exceeded again,
+ %% before we have had the possibility to
+ %% process the older one.
+ DetectedBy = {dump_log, InitBy},
+ Event = {mnesia_overload, DetectedBy},
+ mnesia_lib:report_system_event(Event)
+ end,
+ Queue2 = Queue ++ [Worker],
+ State2 = State#state{dumper_queue = Queue2},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, schema_commit_lock) ->
+ Queue = State#state.dumper_queue,
+ Queue2 = Queue ++ [Worker],
+ State2 = State#state{dumper_queue = Queue2},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, net_load) ->
+ Queue = State#state.loader_queue,
+ State2 = State#state{loader_queue = Queue ++ [Worker]},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, send_table) ->
+ Queue = State#state.sender_queue,
+ State2 = State#state{sender_queue = Queue ++ [Worker]},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, disc_load) ->
+ Queue = State#state.loader_queue,
+ State2 = State#state{loader_queue = Queue ++ [Worker]},
+ opt_start_worker(State2);
+% Block controller should be used for upgrading mnesia.
+add_worker(Worker, State) when record(Worker, block_controller) ->
+ Queue = State#state.dumper_queue,
+ Queue2 = [Worker | Queue],
+ State2 = State#state{dumper_queue = Queue2},
+ opt_start_worker(State2).
+
+%% Optionally start a worker
+%%
+%% Dumpers and loaders may run simultaneously
+%% but neither of them may run during schema commit.
+%% Loaders may not start if a schema commit is enqueued.
+opt_start_worker(State) when State#state.is_stopping == true ->
+ State;
+opt_start_worker(State) ->
+ %% Prioritize dumper and schema commit
+ %% by checking them first
+ case State#state.dumper_queue of
+ [Worker | _Rest] when State#state.dumper_pid == undefined ->
+	    %% Great, a worker is in the queue and neither
+	    %% a schema transaction is being committed
+	    %% nor a dumper is running
+
+ %% Start worker but keep him in the queue
+ if
+ record(Worker, schema_commit_lock) ->
+ ReplyTo = Worker#schema_commit_lock.owner,
+ reply(ReplyTo, granted),
+ {Owner, _Tag} = ReplyTo,
+ State#state{dumper_pid = Owner};
+
+ record(Worker, dump_log) ->
+ Pid = spawn_link(?MODULE, dump_and_reply, [self(), Worker]),
+ State2 = State#state{dumper_pid = Pid},
+
+ %% If the worker was a dumper we may
+ %% possibly be able to start a loader
+ %% or sender
+ State3 = opt_start_sender(State2),
+ opt_start_loader(State3);
+
+ record(Worker, block_controller) ->
+ case {State#state.sender_pid, State#state.loader_pid} of
+ {undefined, undefined} ->
+ ReplyTo = Worker#block_controller.owner,
+ reply(ReplyTo, granted),
+ {Owner, _Tag} = ReplyTo,
+ State#state{dumper_pid = Owner};
+ _ ->
+ State
+ end
+ end;
+ _ ->
+ %% Bad luck, try with a loader or sender instead
+ State2 = opt_start_sender(State),
+ opt_start_loader(State2)
+ end.
+
+opt_start_sender(State) ->
+ case State#state.sender_queue of
+ []->
+ %% No need
+ State;
+
+ _ when State#state.sender_pid /= undefined ->
+ %% Bad luck, a sender is already running
+ State;
+
+ [Sender | _SenderRest] ->
+ case State#state.loader_queue of
+ [Loader | _LoaderRest]
+ when State#state.loader_pid /= undefined,
+ Loader#net_load.table == Sender#send_table.table ->
+ %% A conflicting loader is running
+ State;
+ _ ->
+ SchemaQueue = State#state.dumper_queue,
+ case lists:keymember(schema_commit, 1, SchemaQueue) of
+ false ->
+
+ %% Start worker but keep him in the queue
+ Pid = spawn_link(?MODULE, send_and_reply,
+ [self(), Sender]),
+ State#state{sender_pid = Pid};
+ true ->
+ %% Bad luck, we must wait for the schema commit
+ State
+ end
+ end
+ end.
+
+opt_start_loader(State) ->
+ LoaderQueue = State#state.loader_queue,
+ if
+ LoaderQueue == [] ->
+ %% No need
+ State;
+
+ State#state.loader_pid /= undefined ->
+	    %% Bad luck, a loader is already running
+ State;
+
+ true ->
+ SchemaQueue = State#state.dumper_queue,
+ case lists:keymember(schema_commit, 1, SchemaQueue) of
+ false ->
+ {Worker, Rest} = pick_next(LoaderQueue),
+
+ %% Start worker but keep him in the queue
+ Pid = spawn_link(?MODULE, load_and_reply, [self(), Worker]),
+ State#state{loader_pid = Pid,
+ loader_queue = [Worker | Rest]};
+ true ->
+ %% Bad luck, we must wait for the schema commit
+ State
+ end
+ end.
+
+start_remote_sender(Node, Tab, Receiver, Storage) ->
+ Msg = #send_table{table = Tab,
+ receiver_pid = Receiver,
+ remote_storage = Storage},
+ gen_server:cast({?SERVER_NAME, Node}, Msg).
+
+dump_and_reply(ReplyTo, Worker) ->
+ %% No trap_exit, die intentionally instead
+ Res = mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by),
+ ReplyTo ! #dumper_done{worker_pid = self(),
+ worker_res = Res},
+ unlink(ReplyTo),
+ exit(normal).
+
+send_and_reply(ReplyTo, Worker) ->
+ %% No trap_exit, die intentionally instead
+ Res = mnesia_loader:send_table(Worker#send_table.receiver_pid,
+ Worker#send_table.table,
+ Worker#send_table.remote_storage),
+ ReplyTo ! #sender_done{worker_pid = self(),
+ worker_res = Res},
+ unlink(ReplyTo),
+ exit(normal).
+
+
+load_and_reply(ReplyTo, Worker) ->
+ process_flag(trap_exit, true),
+ Done = load_table(Worker),
+ ReplyTo ! Done#loader_done{worker_pid = self()},
+ unlink(ReplyTo),
+ exit(normal).
+
+%% Now it is time to load the table
+%% but first we must check if it is still necessary
+load_table(Load) when record(Load, net_load) ->
+ Tab = Load#net_load.table,
+ ReplyTo = Load#net_load.opt_reply_to,
+ Reason = Load#net_load.reason,
+ LocalC = val({Tab, local_content}),
+ AccessMode = val({Tab, access_mode}),
+ ReadNode = val({Tab, where_to_read}),
+ Active = filter_active(Tab),
+ Done = #loader_done{is_loaded = true,
+ table_name = Tab,
+ needs_announce = false,
+ needs_sync = false,
+ needs_reply = true,
+ reply_to = ReplyTo,
+ reply = {loaded, ok}
+ },
+ if
+ ReadNode == node() ->
+ %% Already loaded locally
+ Done;
+ LocalC == true ->
+ Res = mnesia_loader:disc_load_table(Tab, load_local_content),
+ Done#loader_done{reply = Res, needs_announce = true, needs_sync = true};
+ AccessMode == read_only ->
+ disc_load_table(Tab, Reason, ReplyTo);
+ true ->
+ %% Either we cannot read the table yet
+ %% or someone is moving a replica between
+ %% two nodes
+ Cs = Load#net_load.cstruct,
+ Res = mnesia_loader:net_load_table(Tab, Reason, Active, Cs),
+ case Res of
+ {loaded, ok} ->
+ Done#loader_done{needs_sync = true,
+ reply = Res};
+ {not_loaded, storage_unknown} ->
+ Done#loader_done{reply = Res};
+ {not_loaded, _} ->
+ Done#loader_done{is_loaded = false,
+ needs_reply = false,
+ reply = Res}
+ end
+ end;
+
+load_table(Load) when record(Load, disc_load) ->
+ Tab = Load#disc_load.table,
+ Reason = Load#disc_load.reason,
+ ReplyTo = Load#disc_load.opt_reply_to,
+ ReadNode = val({Tab, where_to_read}),
+ Active = filter_active(Tab),
+ Done = #loader_done{is_loaded = true,
+ table_name = Tab,
+ needs_announce = false,
+ needs_sync = false,
+ needs_reply = false
+ },
+ if
+ Active == [], ReadNode == nowhere ->
+	    %% Not loaded anywhere, let's load it from disc
+ disc_load_table(Tab, Reason, ReplyTo);
+ ReadNode == nowhere ->
+	    %% Already loaded on another node, let's get it
+ Cs = val({Tab, cstruct}),
+ case mnesia_loader:net_load_table(Tab, Reason, Active, Cs) of
+ {loaded, ok} ->
+ Done#loader_done{needs_sync = true};
+ {not_loaded, storage_unknown} ->
+ Done#loader_done{is_loaded = false};
+ {not_loaded, ErrReason} ->
+ Done#loader_done{is_loaded = false,
+ reply = {not_loaded,ErrReason}}
+ end;
+ true ->
+ %% Already readable, do not worry be happy
+ Done
+ end.
+
+disc_load_table(Tab, Reason, ReplyTo) ->
+ Done = #loader_done{is_loaded = true,
+ table_name = Tab,
+ needs_announce = false,
+ needs_sync = false,
+ needs_reply = true,
+ reply_to = ReplyTo,
+ reply = {loaded, ok}
+ },
+ Res = mnesia_loader:disc_load_table(Tab, Reason),
+ if
+ Res == {loaded, ok} ->
+ Done#loader_done{needs_announce = true,
+ needs_sync = true,
+ reply = Res};
+ ReplyTo /= undefined ->
+ Done#loader_done{is_loaded = false,
+ reply = Res};
+ true ->
+ fatal("Cannot load table ~p from disc: ~p~n", [Tab, Res])
+ end.
+
+filter_active(Tab) ->
+ ByForce = val({Tab, load_by_force}),
+ Active = val({Tab, active_replicas}),
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ do_filter_active(ByForce, Active, Masters).
+
+do_filter_active(true, Active, _Masters) ->
+ Active;
+do_filter_active(false, Active, []) ->
+ Active;
+do_filter_active(false, Active, Masters) ->
+ mnesia_lib:intersect(Active, Masters).
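+
+%% A small worked example of the filtering above (node names are made up):
+%% with Active = [a@host, b@host, c@host] and Masters = [b@host],
+%%
+%%   do_filter_active(false, [a@host, b@host, c@host], [b@host]) -> [b@host]
+%%
+%% i.e. only the master replica remains a load candidate, unless the table is
+%% marked load_by_force, in which case all active replicas are kept.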
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_dumper.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_dumper.erl
new file mode 100644
index 0000000000..bbdb04589b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_dumper.erl
@@ -0,0 +1,1092 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_dumper.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_dumper).
+
+%% The InitBy arg may be one of the following:
+%% scan_decisions Initial scan for decisions
+%% startup Initial dump during startup
+%% schema_prepare Dump initiated during schema transaction preparation
+%% schema_update Dump initiated during schema transaction commit
+%% fast_schema_update A schema_update, but ignores the log file
+%% user Dump initiated by user
+%% write_threshold Automatic dump caused by too many log writes
+%% time_threshold Automatic dump caused by timeout
+
+%% Public interface
+-export([
+ get_log_writes/0,
+ incr_log_writes/0,
+ raw_dump_table/2,
+ raw_named_dump_table/2,
+ start_regulator/0,
+ opt_dump_log/1,
+ update/3
+ ]).
+
+ %% Internal stuff
+-export([regulator_init/1]).
+
+-include("mnesia.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-import(mnesia_lib, [fatal/2, dbg_out/2]).
+
+-define(REGULATOR_NAME, mnesia_dumper_load_regulator).
+-define(DumpToEtsMultiplier, 4).
+
+-record(state, {initiated_by = nobody,
+ dumper = nopid,
+ regulator_pid,
+ supervisor_pid,
+ queue = [],
+ timeout}).
+
+get_log_writes() ->
+ Max = mnesia_monitor:get_env(dump_log_write_threshold),
+ Prev = mnesia_lib:read_counter(trans_log_writes),
+ Left = mnesia_lib:read_counter(trans_log_writes_left),
+ Diff = Max - Left,
+ Prev + Diff.
+
+incr_log_writes() ->
+ Left = mnesia_lib:incr_counter(trans_log_writes_left, -1),
+ if
+ Left > 0 ->
+ ignore;
+ true ->
+ adjust_log_writes(true)
+ end.
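+
+%% A rough illustration of the countdown (threshold value made up): with
+%% dump_log_write_threshold = 1000 and 250 writes since the last reset,
+%% trans_log_writes_left is 750, so
+%%
+%%   get_log_writes() =:= mnesia_lib:read_counter(trans_log_writes) + (1000 - 750)
+%%
+%% When the countdown reaches zero, adjust_log_writes/1 resets it to the
+%% threshold and requests an asynchronous write_threshold dump.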
+
+adjust_log_writes(DoCast) ->
+ Token = {mnesia_adjust_log_writes, self()},
+ case global:set_lock(Token, [node()], 1) of
+ false ->
+ ignore; %% Somebody else is sending a dump request
+ true ->
+ case DoCast of
+ false ->
+ ignore;
+ true ->
+ mnesia_controller:async_dump_log(write_threshold)
+ end,
+ Max = mnesia_monitor:get_env(dump_log_write_threshold),
+ Left = mnesia_lib:read_counter(trans_log_writes_left),
+ %% Don't care if we lost a few writes
+ mnesia_lib:set_counter(trans_log_writes_left, Max),
+ Diff = Max - Left,
+ mnesia_lib:incr_counter(trans_log_writes, Diff),
+ global:del_lock(Token, [node()])
+ end.
+
+%% Returns 'ok' or exits
+opt_dump_log(InitBy) ->
+ Reg = case whereis(?REGULATOR_NAME) of
+ undefined ->
+ nopid;
+ Pid when pid(Pid) ->
+ Pid
+ end,
+ perform_dump(InitBy, Reg).
+
+%% Scan for decisions
+perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
+ ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
+
+ dbg_out("Transaction log dump initiated by ~w~n", [InitBy]),
+ scan_decisions(mnesia_log:previous_log_file(), InitBy, Regulator),
+ scan_decisions(mnesia_log:latest_log_file(), InitBy, Regulator);
+
+%% Propagate the log into the DAT-files
+perform_dump(InitBy, Regulator) ->
+ ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
+ LogState = mnesia_log:prepare_log_dump(InitBy),
+ dbg_out("Transaction log dump initiated by ~w: ~w~n",
+ [InitBy, LogState]),
+ adjust_log_writes(false),
+ mnesia_recover:allow_garb(),
+ case LogState of
+ already_dumped ->
+ dumped;
+ {needs_dump, Diff} ->
+ U = mnesia_monitor:get_env(dump_log_update_in_place),
+ Cont = mnesia_log:init_log_dump(),
+ case catch do_perform_dump(Cont, U, InitBy, Regulator, undefined) of
+ ok ->
+ ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_recover:dump_decision_tab();
+ false ->
+ mnesia_log:purge_some_logs()
+ end,
+ %% And now to the crucial point...
+ mnesia_log:confirm_log_dump(Diff);
+ {error, Reason} ->
+ {error, Reason};
+ {'EXIT', {Desc, Reason}} ->
+ case mnesia_monitor:get_env(auto_repair) of
+ true ->
+ mnesia_lib:important(Desc, Reason),
+ %% Ignore rest of the log
+ mnesia_log:confirm_log_dump(Diff);
+ false ->
+ fatal(Desc, Reason)
+ end
+ end;
+ {error, Reason} ->
+ {error, {"Cannot prepare log dump", Reason}}
+ end.
+
+scan_decisions(Fname, InitBy, Regulator) ->
+ Exists = mnesia_lib:exists(Fname),
+ case Exists of
+ false ->
+ ok;
+ true ->
+ Header = mnesia_log:trans_log_header(),
+ Name = previous_log,
+ mnesia_log:open_log(Name, Header, Fname, Exists,
+ mnesia_monitor:get_env(auto_repair), read_only),
+ Cont = start,
+ Res = (catch do_perform_dump(Cont, false, InitBy, Regulator, undefined)),
+ mnesia_log:close_log(Name),
+ case Res of
+ ok -> ok;
+ {'EXIT', Reason} -> {error, Reason}
+ end
+ end.
+
+do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) ->
+ case mnesia_log:chunk_log(Cont) of
+ {C2, Recs} ->
+ case catch insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of
+ {'EXIT', R} ->
+ Reason = {"Transaction log dump error: ~p~n", [R]},
+ close_files(InPlace, {error, Reason}, InitBy),
+ exit(Reason);
+ Version ->
+ do_perform_dump(C2, InPlace, InitBy, Regulator, Version)
+ end;
+ eof ->
+ close_files(InPlace, ok, InitBy),
+ ok
+ end.
+
+insert_recs([Rec | Recs], InPlace, InitBy, Regulator, LogV) ->
+ regulate(Regulator),
+ case insert_rec(Rec, InPlace, InitBy, LogV) of
+ LogH when record(LogH, log_header) ->
+ insert_recs(Recs, InPlace, InitBy, Regulator, LogH#log_header.log_version);
+ _ ->
+ insert_recs(Recs, InPlace, InitBy, Regulator, LogV)
+ end;
+
+insert_recs([], _InPlace, _InitBy, _Regulator, Version) ->
+ Version.
+
+insert_rec(Rec, _InPlace, scan_decisions, _LogV) ->
+ if
+ record(Rec, commit) ->
+ ignore;
+ record(Rec, log_header) ->
+ ignore;
+ true ->
+ mnesia_recover:note_log_decision(Rec, scan_decisions)
+ end;
+insert_rec(Rec, InPlace, InitBy, LogV) when record(Rec, commit) ->
+ %% Determine the Outcome of the transaction and recover it
+ D = Rec#commit.decision,
+ case mnesia_recover:wait_for_decision(D, InitBy) of
+ {Tid, committed} ->
+ do_insert_rec(Tid, Rec, InPlace, InitBy, LogV);
+ {Tid, aborted} ->
+ mnesia_schema:undo_prepare_commit(Tid, Rec)
+ end;
+insert_rec(H, _InPlace, _InitBy, _LogV) when record(H, log_header) ->
+ CurrentVersion = mnesia_log:version(),
+ if
+ H#log_header.log_kind /= trans_log ->
+ exit({"Bad kind of transaction log", H});
+ H#log_header.log_version == CurrentVersion ->
+ ok;
+ H#log_header.log_version == "4.2" ->
+ ok;
+ H#log_header.log_version == "4.1" ->
+ ok;
+ H#log_header.log_version == "4.0" ->
+ ok;
+ true ->
+ fatal("Bad version of transaction log: ~p~n", [H])
+ end,
+ H;
+
+insert_rec(_Rec, _InPlace, _InitBy, _LogV) ->
+ ok.
+
+do_insert_rec(Tid, Rec, InPlace, InitBy, LogV) ->
+ case Rec#commit.schema_ops of
+ [] ->
+ ignore;
+ SchemaOps ->
+ case val({schema, storage_type}) of
+ ram_copies ->
+ insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV);
+ Storage ->
+ true = open_files(schema, Storage, InPlace, InitBy),
+ insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV)
+ end
+ end,
+ D = Rec#commit.disc_copies,
+ insert_ops(Tid, disc_copies, D, InPlace, InitBy, LogV),
+ case InitBy of
+ startup ->
+ DO = Rec#commit.disc_only_copies,
+ insert_ops(Tid, disc_only_copies, DO, InPlace, InitBy, LogV);
+ _ ->
+ ignore
+ end.
+
+
+update(_Tid, [], _DumperMode) ->
+ dumped;
+update(Tid, SchemaOps, DumperMode) ->
+ UseDir = mnesia_monitor:use_dir(),
+ Res = perform_update(Tid, SchemaOps, DumperMode, UseDir),
+ mnesia_controller:release_schema_commit_lock(),
+ Res.
+
+perform_update(_Tid, _SchemaOps, mandatory, true) ->
+ %% Force a dump of the transaction log in order to let the
+ %% dumper perform needed updates
+
+ InitBy = schema_update,
+ ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
+ opt_dump_log(InitBy);
+perform_update(Tid, SchemaOps, _DumperMode, _UseDir) ->
+    %% No need for a full transaction log dump.
+    %% Ignore the log file and perform only
+    %% the corresponding updates.
+
+ InitBy = fast_schema_update,
+ InPlace = mnesia_monitor:get_env(dump_log_update_in_place),
+ ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
+ case catch insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy,
+ mnesia_log:version()) of
+ {'EXIT', Reason} ->
+ Error = {error, {"Schema update error", Reason}},
+ close_files(InPlace, Error, InitBy),
+ fatal("Schema update error ~p ~p", [Reason, SchemaOps]);
+ _ ->
+ ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
+ close_files(InPlace, ok, InitBy),
+ ok
+ end.
+
+insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok;
+insert_ops(Tid, Storage, [Op], InPlace, InitBy, Ver) when Ver >= "4.3"->
+ insert_op(Tid, Storage, Op, InPlace, InitBy),
+ ok;
+insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver >= "4.3"->
+ insert_op(Tid, Storage, Op, InPlace, InitBy),
+ insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver);
+insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver < "4.3" ->
+ insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver),
+ insert_op(Tid, Storage, Op, InPlace, InitBy).
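+
+%% The clauses above preserve the op order of the log version at hand: for a
+%% log of version "4.3" or later, SchemaOps = [Op1, Op2, Op3] is applied as
+%% Op1, Op2, Op3, while an older log is applied back to front (Op3, Op2, Op1),
+%% since the last clause recurses before calling insert_op/5.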
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Normal ops
+
+disc_insert(_Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
+ case open_files(Tab, Storage, InPlace, InitBy) of
+ true ->
+ case Storage of
+ disc_copies when Tab /= schema ->
+ mnesia_log:append({?MODULE,Tab}, {{Tab, Key}, Val, Op}),
+ ok;
+ _ ->
+ case Op of
+ write ->
+ ok = dets:insert(Tab, Val);
+ delete ->
+ ok = dets:delete(Tab, Key);
+ update_counter ->
+ {RecName, Incr} = Val,
+ case catch dets:update_counter(Tab, Key, Incr) of
+ CounterVal when integer(CounterVal) ->
+ ok;
+ _ ->
+ Zero = {RecName, Key, 0},
+ ok = dets:insert(Tab, Zero)
+ end;
+ delete_object ->
+ ok = dets:delete_object(Tab, Val);
+ clear_table ->
+ ok = dets:match_delete(Tab, '_')
+ end
+ end;
+ false ->
+ ignore
+ end.
+
+insert(Tid, Storage, Tab, Key, [Val | Tail], Op, InPlace, InitBy) ->
+ insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
+ insert(Tid, Storage, Tab, Key, Tail, Op, InPlace, InitBy);
+
+insert(_Tid, _Storage, _Tab, _Key, [], _Op, _InPlace, _InitBy) ->
+ ok;
+
+insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
+ Item = {{Tab, Key}, Val, Op},
+ case InitBy of
+ startup ->
+ disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
+
+ _ when Storage == ram_copies ->
+ mnesia_tm:do_update_op(Tid, Storage, Item),
+ Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
+ mnesia_tm:do_snmp(Tid, Snmp);
+
+ _ when Storage == disc_copies ->
+ disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
+ mnesia_tm:do_update_op(Tid, Storage, Item),
+ Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
+ mnesia_tm:do_snmp(Tid, Snmp);
+
+ _ when Storage == disc_only_copies ->
+ mnesia_tm:do_update_op(Tid, Storage, Item),
+ Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
+ mnesia_tm:do_snmp(Tid, Snmp);
+
+ _ when Storage == unknown ->
+ ignore
+ end.
+
+disc_delete_table(Tab, Storage) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ if
+ Storage == disc_only_copies; Tab == schema ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ Dat = mnesia_lib:tab2dat(Tab),
+ file:delete(Dat);
+ true ->
+ DclFile = mnesia_lib:tab2dcl(Tab),
+ case get({?MODULE,Tab}) of
+ {opened_dumper, dcl} ->
+ del_opened_tab(Tab),
+ mnesia_log:unsafe_close_log(Tab);
+ _ ->
+ ok
+ end,
+ file:delete(DclFile),
+ DcdFile = mnesia_lib:tab2dcd(Tab),
+ file:delete(DcdFile),
+ ok
+ end,
+ erase({?MODULE, Tab});
+ false ->
+ ignore
+ end.
+
+disc_delete_indecies(_Tab, _Cs, Storage) when Storage /= disc_only_copies ->
+ ignore;
+disc_delete_indecies(Tab, Cs, disc_only_copies) ->
+ Indecies = Cs#cstruct.index,
+ mnesia_index:del_transient(Tab, Indecies, disc_only_copies).
+
+insert_op(Tid, Storage, {{Tab, Key}, Val, Op}, InPlace, InitBy) ->
+ %% Propagate to disc only
+ disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% NOTE that all operations below will only
+%% be performed if the dump is initiated by
+%% startup, schema_update or fast_schema_update
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+insert_op(_Tid, schema_ops, _OP, _InPlace, Initby)
+ when Initby /= startup,
+ Initby /= fast_schema_update,
+ Initby /= schema_update ->
+ ignore;
+
+insert_op(Tid, _, {op, rec, Storage, Item}, InPlace, InitBy) ->
+ {{Tab, Key}, ValList, Op} = Item,
+ insert(Tid, Storage, Tab, Key, ValList, Op, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_copy_type, N, FromS, ToS, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Val = mnesia_schema:insert_cstruct(Tid, Cs, true), % Update ram only
+ {schema, Tab, _} = Val,
+ if
+ InitBy /= startup ->
+ mnesia_controller:add_active_replica(Tab, N, Cs);
+ true ->
+ ignore
+ end,
+ if
+ N == node() ->
+ Dmp = mnesia_lib:tab2dmp(Tab),
+ Dat = mnesia_lib:tab2dat(Tab),
+ Dcd = mnesia_lib:tab2dcd(Tab),
+ Dcl = mnesia_lib:tab2dcl(Tab),
+ case {FromS, ToS} of
+ {ram_copies, disc_copies} when Tab == schema ->
+ ok = ensure_rename(Dmp, Dat);
+ {ram_copies, disc_copies} ->
+ file:delete(Dcl),
+ ok = ensure_rename(Dmp, Dcd);
+ {disc_copies, ram_copies} when Tab == schema ->
+ mnesia_lib:set(use_dir, false),
+ mnesia_monitor:unsafe_close_dets(Tab),
+ file:delete(Dat);
+ {disc_copies, ram_copies} ->
+ file:delete(Dcl),
+ file:delete(Dcd);
+ {ram_copies, disc_only_copies} ->
+ ok = ensure_rename(Dmp, Dat),
+ true = open_files(Tab, disc_only_copies, InPlace, InitBy),
+		    %% ram_delete_table must be done before init_indecies,
+		    %% since it uses info which is reset by init_indecies.
+		    %% That is harmless, because init_indecies does not use
+		    %% the ram replica of the table when creating the disc
+		    %% index; could be improved :)
+ mnesia_schema:ram_delete_table(Tab, FromS),
+ PosList = Cs#cstruct.index,
+ mnesia_index:init_indecies(Tab, disc_only_copies, PosList);
+ {disc_only_copies, ram_copies} ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ disc_delete_indecies(Tab, Cs, disc_only_copies),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_controller:get_disc_copy(Tab)
+ end,
+ disc_delete_table(Tab, disc_only_copies);
+ {disc_copies, disc_only_copies} ->
+ ok = ensure_rename(Dmp, Dat),
+ true = open_files(Tab, disc_only_copies, InPlace, InitBy),
+ mnesia_schema:ram_delete_table(Tab, FromS),
+ PosList = Cs#cstruct.index,
+ mnesia_index:init_indecies(Tab, disc_only_copies, PosList),
+ file:delete(Dcl),
+ file:delete(Dcd);
+ {disc_only_copies, disc_copies} ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ disc_delete_indecies(Tab, Cs, disc_only_copies),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_log:ets2dcd(Tab),
+ mnesia_controller:get_disc_copy(Tab),
+ disc_delete_table(Tab, disc_only_copies)
+ end
+ end;
+ true ->
+ ignore
+ end,
+ S = val({schema, storage_type}),
+ disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy);
+
+insert_op(Tid, _, {op, transform, _Fun, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ disc_copies ->
+ open_dcl(Cs#cstruct.name);
+ _ ->
+ ignore
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+%%% Operations below this are handled without using the log.
+
+insert_op(Tid, _, {op, restore_recreate, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Type = Cs#cstruct.type,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+    %% Delete all possibly existing files and tables
+ disc_delete_table(Tab, Storage),
+ disc_delete_indecies(Tab, Cs, Storage),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_schema:ram_delete_table(Tab, Storage),
+ mnesia_checkpoint:tm_del_copy(Tab, node())
+ end,
+ %% delete_cstruct(Tid, Cs, InPlace, InitBy),
+ %% And create new ones..
+ if
+ (InitBy == startup) or (Storage == unknown) ->
+ ignore;
+ Storage == ram_copies ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ mnesia_monitor:mktab(Tab, Args);
+ Storage == disc_copies ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ mnesia_monitor:mktab(Tab, Args),
+ File = mnesia_lib:tab2dcd(Tab),
+ FArg = [{file, File}, {name, {mnesia,create}},
+ {repair, false}, {mode, read_write}],
+ {ok, Log} = mnesia_monitor:open_log(FArg),
+ mnesia_monitor:unsafe_close_log(Log);
+ Storage == disc_only_copies ->
+ File = mnesia_lib:tab2dat(Tab),
+ file:delete(File),
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ mnesia_monitor:open_dets(Tab, Args)
+ end,
+ insert_op(Tid, ignore, {op, create_table, TabDef}, InPlace, InitBy);
+
+insert_op(Tid, _, {op, create_table, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, false, InPlace, InitBy),
+ Tab = Cs#cstruct.name,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ case InitBy of
+ startup ->
+ case Storage of
+ unknown ->
+ ignore;
+ ram_copies ->
+ ignore;
+ disc_copies ->
+ Dcd = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dcd) of
+ true -> ignore;
+ false ->
+ mnesia_log:open_log(temp,
+ mnesia_log:dcl_log_header(),
+ Dcd,
+ false,
+ false,
+ read_write),
+ mnesia_log:unsafe_close_log(temp)
+ end;
+ _ ->
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ case mnesia_monitor:open_dets(Tab, Args) of
+ {ok, _} ->
+ mnesia_monitor:unsafe_close_dets(Tab);
+ {error, Error} ->
+ exit({"Failed to create dets table", Error})
+ end
+ end;
+ _ ->
+ Copies = mnesia_lib:copy_holders(Cs),
+ Active = mnesia_lib:intersect(Copies, val({current, db_nodes})),
+ [mnesia_controller:add_active_replica(Tab, N, Cs) || N <- Active],
+
+ case Storage of
+ unknown ->
+ case Cs#cstruct.local_content of
+ true ->
+ ignore;
+ false ->
+ mnesia_lib:set_remote_where_to_read(Tab)
+ end;
+ _ ->
+ case Cs#cstruct.local_content of
+ true ->
+ mnesia_lib:set_local_content_whereabouts(Tab);
+ false ->
+ mnesia_lib:set({Tab, where_to_read}, node())
+ end,
+ case Storage of
+ ram_copies ->
+ ignore;
+ _ ->
+			    %% Indices are still created by the loader
+ disc_delete_indecies(Tab, Cs, Storage)
+ %% disc_delete_table(Tab, Storage)
+ end,
+
+ %% Update whereabouts and create table
+ mnesia_controller:create_table(Tab)
+ end
+ end;
+
+insert_op(_Tid, _, {op, dump_table, Size, TabDef}, _InPlace, _InitBy) ->
+ case Size of
+ unknown ->
+ ignore;
+ _ ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Dmp = mnesia_lib:tab2dmp(Tab),
+ Dat = mnesia_lib:tab2dcd(Tab),
+ case Size of
+ 0 ->
+		    %% Assume that the table files are already closed
+ file:delete(Dmp),
+ file:delete(Dat);
+ _ ->
+ ok = ensure_rename(Dmp, Dat)
+ end
+ end;
+
+insert_op(Tid, _, {op, delete_table, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ ignore;
+ Storage ->
+ disc_delete_table(Tab, Storage),
+ disc_delete_indecies(Tab, Cs, Storage),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_schema:ram_delete_table(Tab, Storage),
+ mnesia_checkpoint:tm_del_copy(Tab, node())
+ end
+ end,
+ delete_cstruct(Tid, Cs, InPlace, InitBy);
+
+insert_op(Tid, _, {op, clear_table, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ ignore;
+ Storage ->
+ Oid = '_', %%val({Tab, wild_pattern}),
+ if Storage == disc_copies ->
+ open_dcl(Cs#cstruct.name);
+ true ->
+ ignore
+ end,
+ insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy)
+ end;
+
+insert_op(Tid, _, {op, merge_schema, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, false, InPlace, InitBy);
+
+insert_op(Tid, _, {op, del_table_copy, Storage, Node, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ if
+ Tab == schema, Storage == ram_copies ->
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+ Tab /= schema ->
+ mnesia_controller:del_active_replica(Tab, Node),
+ mnesia_lib:del({Tab, Storage}, Node),
+ if
+ Node == node() ->
+ case Cs#cstruct.local_content of
+ true -> mnesia_lib:set({Tab, where_to_read}, nowhere);
+ false -> mnesia_lib:set_remote_where_to_read(Tab)
+ end,
+ mnesia_lib:del({schema, local_tables}, Tab),
+ mnesia_lib:set({Tab, storage_type}, unknown),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy),
+ disc_delete_table(Tab, Storage),
+ disc_delete_indecies(Tab, Cs, Storage),
+ mnesia_schema:ram_delete_table(Tab, Storage),
+ mnesia_checkpoint:tm_del_copy(Tab, Node);
+ true ->
+ case val({Tab, where_to_read}) of
+ Node ->
+ mnesia_lib:set_remote_where_to_read(Tab);
+ _ ->
+ ignore
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy)
+ end
+ end;
+
+insert_op(Tid, _, {op, add_table_copy, _Storage, _Node, TabDef}, InPlace, InitBy) ->
+    %% During prepare commit, the files were created
+    %% and the replica was announced
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, add_snmp, _Us, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, del_snmp, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ if
+ InitBy /= startup,
+ Storage /= unknown ->
+ case ?catch_val({Tab, {index, snmp}}) of
+ {'EXIT', _} ->
+ ignore;
+ Stab ->
+ mnesia_snmp_hook:delete_table(Tab, Stab),
+ mnesia_lib:unset({Tab, {index, snmp}})
+ end;
+ true ->
+ ignore
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, add_index, Pos, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = insert_cstruct(Tid, Cs, true, InPlace, InitBy),
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ case InitBy of
+ startup when Storage == disc_only_copies ->
+ mnesia_index:init_indecies(Tab, Storage, [Pos]);
+ startup ->
+ ignore;
+ _ ->
+ mnesia_index:init_indecies(Tab, Storage, [Pos])
+ end;
+
+insert_op(Tid, _, {op, del_index, Pos, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ case InitBy of
+ startup when Storage == disc_only_copies ->
+ mnesia_index:del_index_table(Tab, Storage, Pos);
+ startup ->
+ ignore;
+ _ ->
+ mnesia_index:del_index_table(Tab, Storage, Pos)
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_access_mode,TabDef, _OldAccess, _Access}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ case InitBy of
+ startup -> ignore;
+ _ -> mnesia_controller:change_table_access_mode(Cs)
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_load_order, TabDef, _OldLevel, _Level}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, delete_property, TabDef, PropKey}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_lib:unset({Tab, user_property, PropKey}),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, write_property, TabDef, _Prop}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_frag, _Change, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy).
+
+open_files(Tab, Storage, UpdateInPlace, InitBy)
+ when Storage /= unknown, Storage /= ram_copies ->
+ case get({?MODULE, Tab}) of
+ undefined ->
+ case ?catch_val({Tab, setorbag}) of
+ {'EXIT', _} ->
+ false;
+ Type ->
+ case Storage of
+ disc_copies when Tab /= schema ->
+ Bool = open_disc_copies(Tab, InitBy),
+ Bool;
+ _ ->
+ Fname = prepare_open(Tab, UpdateInPlace),
+ Args = [{file, Fname},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, mnesia_lib:disk_type(Tab, Type)}],
+ {ok, _} = mnesia_monitor:open_dets(Tab, Args),
+ put({?MODULE, Tab}, {opened_dumper, dat}),
+ true
+ end
+ end;
+ already_dumped ->
+ false;
+ {opened_dumper, _} ->
+ true
+ end;
+open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
+ false.
+
+open_disc_copies(Tab, InitBy) ->
+ DclF = mnesia_lib:tab2dcl(Tab),
+ DumpEts =
+ case file:read_file_info(DclF) of
+ {error, enoent} ->
+ false;
+ {ok, DclInfo} ->
+ DcdF = mnesia_lib:tab2dcd(Tab),
+ case file:read_file_info(DcdF) of
+ {error, Reason} ->
+ mnesia_lib:dbg_out("File ~p info_error ~p ~n",
+ [DcdF, Reason]),
+ true;
+ {ok, DcdInfo} ->
+ DcdInfo#file_info.size =<
+ (DclInfo#file_info.size *
+ ?DumpToEtsMultiplier)
+ end
+ end,
+ if
+ DumpEts == false; InitBy == startup ->
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ DclF,
+ mnesia_lib:exists(DclF),
+ mnesia_monitor:get_env(auto_repair),
+ read_write),
+ put({?MODULE, Tab}, {opened_dumper, dcl}),
+ true;
+ true ->
+ mnesia_log:ets2dcd(Tab),
+ put({?MODULE, Tab}, already_dumped),
+ false
+ end.
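+
+%% A worked example of the heuristic above (file sizes made up), with
+%% ?DumpToEtsMultiplier being 4: a 10 MB .DCD next to a 3 MB .DCL gives
+%% 10 =< 3 * 4, so the whole table is rewritten with mnesia_log:ets2dcd/1 and
+%% marked already_dumped; with a 1 MB .DCL, 10 =< 1 * 4 is false and changes
+%% keep being appended to the .DCL log instead. At startup the .DCL log is
+%% always opened, regardless of the sizes.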
+
+%% Always opens the dcl file for writing, overriding the already_dumped
+%% mechanism; used for schema transactions.
+open_dcl(Tab) ->
+ case get({?MODULE, Tab}) of
+ {opened_dumper, _} ->
+ true;
+ _ -> %% undefined or already_dumped
+ DclF = mnesia_lib:tab2dcl(Tab),
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ DclF,
+ mnesia_lib:exists(DclF),
+ mnesia_monitor:get_env(auto_repair),
+ read_write),
+ put({?MODULE, Tab}, {opened_dumper, dcl}),
+ true
+ end.
+
+prepare_open(Tab, UpdateInPlace) ->
+ Dat = mnesia_lib:tab2dat(Tab),
+ case UpdateInPlace of
+ true ->
+ Dat;
+ false ->
+ Tmp = mnesia_lib:tab2tmp(Tab),
+ case catch mnesia_lib:copy_file(Dat, Tmp) of
+ ok ->
+ Tmp;
+ Error ->
+ fatal("Cannot copy dets file ~p to ~p: ~p~n",
+ [Dat, Tmp, Error])
+ end
+ end.
+
+del_opened_tab(Tab) ->
+ erase({?MODULE, Tab}).
+
+close_files(UpdateInPlace, Outcome, InitBy) -> % Update in place
+ close_files(UpdateInPlace, Outcome, InitBy, get()).
+
+close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, already_dumped} | Tail]) ->
+ erase({?MODULE, Tab}),
+ close_files(InPlace, Outcome, InitBy, Tail);
+close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, {opened_dumper, Type}} | Tail]) ->
+ erase({?MODULE, Tab}),
+ case val({Tab, storage_type}) of
+ disc_only_copies when InitBy /= startup ->
+ ignore;
+ disc_copies when Tab /= schema ->
+ mnesia_log:close_log({?MODULE,Tab});
+ Storage ->
+ do_close(InPlace, Outcome, Tab, Type, Storage)
+ end,
+ close_files(InPlace, Outcome, InitBy, Tail);
+
+close_files(InPlace, Outcome, InitBy, [_ | Tail]) ->
+ close_files(InPlace, Outcome, InitBy, Tail);
+close_files(_, _, _InitBy, []) ->
+ ok.
+
+%% If the storage is unknown during close, clean up the files; this can happen
+%% if the timing is right and a dirty_write conflicts with schema operations.
+do_close(_, _, Tab, dcl, unknown) ->
+ mnesia_log:close_log({?MODULE,Tab}),
+ file:delete(mnesia_lib:tab2dcl(Tab));
+do_close(_, _, Tab, dcl, _) -> %% To be safe, can it happen?
+ mnesia_log:close_log({?MODULE,Tab});
+
+do_close(InPlace, Outcome, Tab, dat, Storage) ->
+ mnesia_monitor:close_dets(Tab),
+ if
+ Storage == unknown, InPlace == true ->
+ file:delete(mnesia_lib:tab2dat(Tab));
+ InPlace == true ->
+ %% Update in place
+ ok;
+ Outcome == ok, Storage /= unknown ->
+ %% Success: swap tmp files with dat files
+ TabDat = mnesia_lib:tab2dat(Tab),
+ ok = file:rename(mnesia_lib:tab2tmp(Tab), TabDat);
+ true ->
+ file:delete(mnesia_lib:tab2tmp(Tab))
+ end.
+
+
+ensure_rename(From, To) ->
+ case mnesia_lib:exists(From) of
+ true ->
+ file:rename(From, To);
+ false ->
+ case mnesia_lib:exists(To) of
+ true ->
+ ok;
+ false ->
+ {error, {rename_failed, From, To}}
+ end
+ end.
+
+insert_cstruct(Tid, Cs, KeepWhereabouts, InPlace, InitBy) ->
+ Val = mnesia_schema:insert_cstruct(Tid, Cs, KeepWhereabouts),
+ {schema, Tab, _} = Val,
+ S = val({schema, storage_type}),
+ disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy),
+ Tab.
+
+delete_cstruct(Tid, Cs, InPlace, InitBy) ->
+ Val = mnesia_schema:delete_cstruct(Tid, Cs),
+ {schema, Tab, _} = Val,
+ S = val({schema, storage_type}),
+ disc_insert(Tid, S, schema, Tab, Val, delete, InPlace, InitBy),
+ Tab.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Raw dump of table. Dumper must have unique access to the ets table.
+
+raw_named_dump_table(Tab, Ftype) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_lib:lock_table(Tab),
+ TmpFname = mnesia_lib:tab2tmp(Tab),
+ Fname =
+ case Ftype of
+ dat -> mnesia_lib:tab2dat(Tab);
+ dmp -> mnesia_lib:tab2dmp(Tab)
+ end,
+ file:delete(TmpFname),
+ file:delete(Fname),
+ TabSize = ?ets_info(Tab, size),
+ TabRef = Tab,
+ DiskType = mnesia_lib:disk_type(Tab),
+ Args = [{file, TmpFname},
+ {keypos, 2},
+ %% {ram_file, true},
+ {estimated_no_objects, TabSize + 256},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, DiskType}],
+ case mnesia_lib:dets_sync_open(TabRef, Args) of
+ {ok, TabRef} ->
+ Storage = ram_copies,
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+
+ case catch raw_dump_table(TabRef, Tab) of
+ {'EXIT', Reason} ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:dets_sync_close(Tab),
+ file:delete(TmpFname),
+ mnesia_lib:unlock_table(Tab),
+ exit({"Dump of table to disc failed", Reason});
+ ok ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:dets_sync_close(Tab),
+ mnesia_lib:unlock_table(Tab),
+ ok = file:rename(TmpFname, Fname)
+ end;
+ {error, Reason} ->
+ mnesia_lib:unlock_table(Tab),
+ exit({"Open of file before dump to disc failed", Reason})
+ end;
+ false ->
+ exit({has_no_disc, node()})
+ end.
+
+raw_dump_table(DetsRef, EtsRef) ->
+ dets:from_ets(DetsRef, EtsRef).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Load regulator
+%%
+%% This is a poor man's substitute for a fair scheduler algorithm
+%% in the Erlang emulator. The mnesia_dumper process performs many
+%% costly BIF invocations and must pay for this. But since the
+%% emulator does not handle this properly, we must compensate with
+%% some form of load regulation of our own in order not to steal
+%% all computation power in the Erlang emulator and make other
+%% processes starve. Hopefully this is a temporary solution.
+
+start_regulator() ->
+ case mnesia_monitor:get_env(dump_log_load_regulation) of
+ false ->
+ nopid;
+ true ->
+ N = ?REGULATOR_NAME,
+ case mnesia_monitor:start_proc(N, ?MODULE, regulator_init, [self()]) of
+ {ok, Pid} ->
+ Pid;
+ {error, Reason} ->
+		    fatal("Failed to start ~p: ~p~n", [N, Reason])
+ end
+ end.
+
+regulator_init(Parent) ->
+ %% No need for trapping exits.
+    %% Using low priority is what provides the regulation.
+ process_flag(priority, low),
+ register(?REGULATOR_NAME, self()),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ regulator_loop().
+
+regulator_loop() ->
+ receive
+ {regulate, From} ->
+ From ! {regulated, self()},
+ regulator_loop();
+ {stop, From} ->
+ From ! {stopped, self()},
+ exit(normal)
+ end.
+
+regulate(nopid) ->
+ ok;
+regulate(RegulatorPid) ->
+ RegulatorPid ! {regulate, self()},
+ receive
+ {regulated, RegulatorPid} -> ok
+ end.
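+
+%% A minimal usage sketch of the regulator (using only names defined in this
+%% module): insert_recs/5 calls
+%%
+%%   regulate(Regulator)
+%%
+%% once per log record, and each call blocks until the low-priority regulator
+%% process replies with {regulated, RegulatorPid}, so the dump can proceed at
+%% most as fast as that process gets scheduled.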
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_event.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_event.erl
new file mode 100644
index 0000000000..fc0638e1ad
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_event.erl
@@ -0,0 +1,263 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_event.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_event).
+
+-behaviour(gen_event).
+%-behaviour(mnesia_event).
+
+%% gen_event callback interface
+-export([init/1,
+ handle_event/2,
+ handle_call/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+-record(state, {nodes = [],
+ dumped_core = false, %% only dump fatal core once
+ args}).
+
+%%%----------------------------------------------------------------
+%%% Callback functions from gen_event
+%%%----------------------------------------------------------------
+
+%%-----------------------------------------------------------------
+%% init(Args) ->
+%% {ok, State} | Error
+%%-----------------------------------------------------------------
+
+init(Args) ->
+ {ok, #state{args = Args}}.
+
+%%-----------------------------------------------------------------
+%% handle_event(Event, State) ->
+%% {ok, NewState} | remove_handler |
+%% {swap_handler, Args1, State1, Mod2, Args2}
+%%-----------------------------------------------------------------
+
+handle_event(Event, State) ->
+ handle_any_event(Event, State).
+
+%%-----------------------------------------------------------------
+%% handle_info(Msg, State) ->
+%% {ok, NewState} | remove_handler |
+%% {swap_handler, Args1, State1, Mod2, Args2}
+%%-----------------------------------------------------------------
+
+handle_info(Msg, State) ->
+ handle_any_event(Msg, State),
+ {ok, State}.
+
+%%-----------------------------------------------------------------
+%% handle_call(Event, State) ->
+%% {ok, Reply, NewState} | {remove_handler, Reply} |
+%% {swap_handler, Reply, Args1, State1, Mod2, Args2}
+%%-----------------------------------------------------------------
+
+handle_call(Msg, State) ->
+ Reply = ok,
+ case handle_any_event(Msg, State) of
+ {ok, NewState} ->
+ {ok, Reply, NewState};
+ remove_handler ->
+ {remove_handler, Reply};
+ {swap_handler,Args1, State1, Mod2, Args2} ->
+ {swap_handler, Reply, Args1, State1, Mod2, Args2}
+ end.
+
+%%-----------------------------------------------------------------
+%% terminate(Reason, State) ->
+%% AnyVal
+%%-----------------------------------------------------------------
+
+terminate(_Reason, _State) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%-----------------------------------------------------------------
+%% Internal functions
+%%-----------------------------------------------------------------
+
+handle_any_event({mnesia_system_event, Event}, State) ->
+ handle_system_event(Event, State);
+handle_any_event({mnesia_table_event, Event}, State) ->
+ handle_table_event(Event, State);
+handle_any_event(Msg, State) ->
+ report_error("~p got unexpected event: ~p~n", [?MODULE, Msg]),
+ {ok, State}.
+
+handle_table_event({Oper, Record, TransId}, State) ->
+ report_info("~p performed by ~p on record:~n\t~p~n",
+ [Oper, TransId, Record]),
+ {ok, State}.
+
+handle_system_event({mnesia_checkpoint_activated, _Checkpoint}, State) ->
+ {ok, State};
+
+handle_system_event({mnesia_checkpoint_deactivated, _Checkpoint}, State) ->
+ {ok, State};
+
+handle_system_event({mnesia_up, Node}, State) ->
+ Nodes = [Node | State#state.nodes],
+ {ok, State#state{nodes = Nodes}};
+
+handle_system_event({mnesia_down, Node}, State) ->
+ case mnesia:system_info(fallback_activated) of
+ true ->
+ case mnesia_monitor:get_env(fallback_error_function) of
+ {mnesia, lkill} ->
+ Msg = "A fallback is installed and Mnesia "
+ "must be restarted. Forcing shutdown "
+ "after mnesia_down from ~p...~n",
+ report_fatal(Msg, [Node], nocore, State#state.dumped_core),
+ mnesia:lkill(),
+ exit(fatal);
+ {UserMod, UserFunc} ->
+ Msg = "Warning: A fallback is installed and Mnesia got mnesia_down "
+ "from ~p. ~n",
+ report_info(Msg, [Node]),
+ case catch apply(UserMod, UserFunc, [Node]) of
+ {'EXIT', {undef, _Reason}} ->
+ %% Backward compatibility
+ apply(UserMod, UserFunc, []);
+ {'EXIT', Reason} ->
+ exit(Reason);
+ _ ->
+ ok
+ end,
+ Nodes = lists:delete(Node, State#state.nodes),
+ {ok, State#state{nodes = Nodes}}
+ end;
+ false ->
+ Nodes = lists:delete(Node, State#state.nodes),
+ {ok, State#state{nodes = Nodes}}
+ end;
+
+handle_system_event({mnesia_overload, Details}, State) ->
+ report_warning("Mnesia is overloaded: ~p~n", [Details]),
+ {ok, State};
+
+handle_system_event({mnesia_info, Format, Args}, State) ->
+ report_info(Format, Args),
+ {ok, State};
+
+handle_system_event({mnesia_warning, Format, Args}, State) ->
+ report_warning(Format, Args),
+ {ok, State};
+
+handle_system_event({mnesia_error, Format, Args}, State) ->
+ report_error(Format, Args),
+ {ok, State};
+
+handle_system_event({mnesia_fatal, Format, Args, BinaryCore}, State) ->
+ report_fatal(Format, Args, BinaryCore, State#state.dumped_core),
+ {ok, State#state{dumped_core = true}};
+
+handle_system_event({inconsistent_database, Reason, Node}, State) ->
+ report_error("mnesia_event got {inconsistent_database, ~w, ~w}~n",
+ [Reason, Node]),
+ {ok, State};
+
+handle_system_event({mnesia_user, Event}, State) ->
+ report_info("User event: ~p~n", [Event]),
+ {ok, State};
+
+handle_system_event(Msg, State) ->
+ report_error("mnesia_event got unexpected system event: ~p~n", [Msg]),
+ {ok, State}.
+
+report_info(Format0, Args0) ->
+ Format = "Mnesia(~p): " ++ Format0,
+ Args = [node() | Args0],
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ io:format(Format, Args);
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+report_warning(Format0, Args0) ->
+ Format = "Mnesia(~p): ** WARNING ** " ++ Format0,
+ Args = [node() | Args0],
+ case erlang:function_exported(error_logger, warning_msg, 2) of
+ true ->
+ error_logger:warning_msg(Format, Args);
+ false ->
+ error_logger:format(Format, Args)
+ end,
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ ok;
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+report_error(Format0, Args0) ->
+ Format = "Mnesia(~p): ** ERROR ** " ++ Format0,
+ Args = [node() | Args0],
+ error_logger:format(Format, Args),
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ ok;
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+report_fatal(Format, Args, BinaryCore, CoreDumped) ->
+ UseDir = mnesia_monitor:use_dir(),
+ CoreDir = mnesia_monitor:get_env(core_dir),
+ if
+ list(CoreDir),CoreDumped == false,binary(BinaryCore) ->
+ core_file(CoreDir,BinaryCore,Format,Args);
+ (UseDir == true),CoreDumped == false,binary(BinaryCore) ->
+ core_file(CoreDir,BinaryCore,Format,Args);
+ true ->
+ report_error("(ignoring core) ** FATAL ** " ++ Format, Args)
+ end.
+
+core_file(CoreDir,BinaryCore,Format,Args) ->
+ %% Integers = tuple_to_list(date()) ++ tuple_to_list(time()),
+ Integers = tuple_to_list(now()),
+ Fun = fun(I) when I < 10 -> ["_0",I];
+ (I) -> ["_",I]
+ end,
+ List = lists:append([Fun(I) || I <- Integers]),
+ CoreFile = if list(CoreDir) ->
+ filename:absname(lists:concat(["MnesiaCore.", node()] ++ List),
+ CoreDir);
+ true ->
+ filename:absname(lists:concat(["MnesiaCore.", node()] ++ List))
+ end,
+ case file:write_file(CoreFile, BinaryCore) of
+ ok ->
+ report_error("(core dumped to file: ~p)~n ** FATAL ** " ++ Format,
+ [CoreFile] ++ Args);
+ {error, Reason} ->
+ report_error("(could not write core file: ~p)~n ** FATAL ** " ++ Format,
+ [Reason] ++ Args)
+ end.
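+
+%% An illustrative file name produced above (timestamp values made up): on a
+%% node named a@host, now() = {1236, 601234, 123456} yields
+%%
+%%   "MnesiaCore.a@host_1236_601234_123456"
+%%
+%% placed under core_dir when that is a directory string, otherwise resolved
+%% with filename:absname/1 relative to the current working directory.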
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag.erl
new file mode 100644
index 0000000000..e1f4e96a95
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag.erl
@@ -0,0 +1,1201 @@
+%%% ``The contents of this file are subject to the Erlang Public License,
+%%% Version 1.1, (the "License"); you may not use this file except in
+%%% compliance with the License. You should have received a copy of the
+%%% Erlang Public License along with this software. If not, it can be
+%%% retrieved via the world wide web at http://www.erlang.org/.
+%%%
+%%% Software distributed under the License is distributed on an "AS IS"
+%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%% the License for the specific language governing rights and limitations
+%%% under the License.
+%%%
+%%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%%% AB. All Rights Reserved.''
+%%%
+%%% $Id: mnesia_frag.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%%
+%%%----------------------------------------------------------------------
+%%% Purpose : Support tables so large that they need
+%%% to be divided into several fragments.
+%%%----------------------------------------------------------------------
+
+%header_doc_include
+
+-module(mnesia_frag).
+-behaviour(mnesia_access).
+
+%% Callback functions when accessed within an activity
+-export([
+ lock/4,
+ write/5, delete/5, delete_object/5,
+ read/5, match_object/5, all_keys/4,
+ select/5,
+ index_match_object/6, index_read/6,
+ foldl/6, foldr/6,
+ table_info/4
+ ]).
+
+%header_doc_include
+
+-export([
+ change_table_frag/2,
+ remove_node/2,
+ expand_cstruct/1,
+ lookup_frag_hash/1,
+ lookup_foreigners/1,
+ frag_names/1,
+ set_frag_hash/2,
+ local_select/4,
+ remote_select/4
+ ]).
+
+-include("mnesia.hrl").
+
+-define(OLD_HASH_MOD, mnesia_frag_old_hash).
+-define(DEFAULT_HASH_MOD, mnesia_frag_hash).
+%%-define(DEFAULT_HASH_MOD, ?OLD_HASH_MOD). %% BUGBUG: New should be default
+
+-record(frag_state,
+ {foreign_key,
+ n_fragments,
+ hash_module,
+ hash_state}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access functions
+
+%impl_doc_include
+
+%% Callback functions which provide transparent
+%% access to fragmented tables from any activity
+%% access context.
+
+lock(ActivityId, Opaque, {table , Tab}, LockKind) ->
+ case frag_names(Tab) of
+ [Tab] ->
+ mnesia:lock(ActivityId, Opaque, {table, Tab}, LockKind);
+ Frags ->
+ DeepNs = [mnesia:lock(ActivityId, Opaque, {table, F}, LockKind) ||
+ F <- Frags],
+ mnesia_lib:uniq(lists:append(DeepNs))
+ end;
+
+lock(ActivityId, Opaque, LockItem, LockKind) ->
+ mnesia:lock(ActivityId, Opaque, LockItem, LockKind).
+
+write(ActivityId, Opaque, Tab, Rec, LockKind) ->
+ Frag = record_to_frag_name(Tab, Rec),
+ mnesia:write(ActivityId, Opaque, Frag, Rec, LockKind).
+
+delete(ActivityId, Opaque, Tab, Key, LockKind) ->
+ Frag = key_to_frag_name(Tab, Key),
+ mnesia:delete(ActivityId, Opaque, Frag, Key, LockKind).
+
+delete_object(ActivityId, Opaque, Tab, Rec, LockKind) ->
+ Frag = record_to_frag_name(Tab, Rec),
+ mnesia:delete_object(ActivityId, Opaque, Frag, Rec, LockKind).
+
+read(ActivityId, Opaque, Tab, Key, LockKind) ->
+ Frag = key_to_frag_name(Tab, Key),
+ mnesia:read(ActivityId, Opaque, Frag, Key, LockKind).
+
+match_object(ActivityId, Opaque, Tab, HeadPat, LockKind) ->
+ MatchSpec = [{HeadPat, [], ['$_']}],
+ select(ActivityId, Opaque, Tab, MatchSpec, LockKind).
+
+select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
+ do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind).
+
+all_keys(ActivityId, Opaque, Tab, LockKind) ->
+ Match = [mnesia:all_keys(ActivityId, Opaque, Frag, LockKind)
+ || Frag <- frag_names(Tab)],
+ lists:append(Match).
+
+index_match_object(ActivityId, Opaque, Tab, Pat, Attr, LockKind) ->
+ Match =
+ [mnesia:index_match_object(ActivityId, Opaque, Frag, Pat, Attr, LockKind)
+ || Frag <- frag_names(Tab)],
+ lists:append(Match).
+
+index_read(ActivityId, Opaque, Tab, Key, Attr, LockKind) ->
+ Match =
+ [mnesia:index_read(ActivityId, Opaque, Frag, Key, Attr, LockKind)
+ || Frag <- frag_names(Tab)],
+ lists:append(Match).
+
+foldl(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ Fun2 = fun(Frag, A) ->
+ mnesia:foldl(ActivityId, Opaque, Fun, A, Frag, LockKind)
+ end,
+ lists:foldl(Fun2, Acc, frag_names(Tab)).
+
+foldr(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ Fun2 = fun(Frag, A) ->
+ mnesia:foldr(ActivityId, Opaque, Fun, A, Frag, LockKind)
+ end,
+ lists:foldr(Fun2, Acc, frag_names(Tab)).
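+
+%% The callbacks above follow two patterns: key-based operations (write/5,
+%% read/5, delete/5, ...) hash the record or key to a single fragment via
+%% record_to_frag_name/2 or key_to_frag_name/2, while whole-table operations
+%% fan out over every fragment and append the per-fragment results, e.g.
+%% (fragment names follow the usual Tab, Tab_frag2, ... convention):
+%%
+%%   [mnesia:all_keys(Tid, Ts, Frag, LockKind)
+%%    || Frag <- [foo, foo_frag2, foo_frag3]]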
+
+table_info(ActivityId, Opaque, {Tab, Key}, Item) ->
+ Frag = key_to_frag_name(Tab, Key),
+ table_info2(ActivityId, Opaque, Tab, Frag, Item);
+table_info(ActivityId, Opaque, Tab, Item) ->
+ table_info2(ActivityId, Opaque, Tab, Tab, Item).
+
+table_info2(ActivityId, Opaque, Tab, Frag, Item) ->
+ case Item of
+ size ->
+ SumFun = fun({_, Size}, Acc) -> Acc + Size end,
+ lists:foldl(SumFun, 0, frag_size(ActivityId, Opaque, Tab));
+ memory ->
+ SumFun = fun({_, Size}, Acc) -> Acc + Size end,
+ lists:foldl(SumFun, 0, frag_memory(ActivityId, Opaque, Tab));
+ base_table ->
+ lookup_prop(Tab, base_table);
+ node_pool ->
+ lookup_prop(Tab, node_pool);
+ n_fragments ->
+ FH = lookup_frag_hash(Tab),
+ FH#frag_state.n_fragments;
+ foreign_key ->
+ FH = lookup_frag_hash(Tab),
+ FH#frag_state.foreign_key;
+ foreigners ->
+ lookup_foreigners(Tab);
+ n_ram_copies ->
+ length(val({Tab, ram_copies}));
+ n_disc_copies ->
+ length(val({Tab, disc_copies}));
+ n_disc_only_copies ->
+ length(val({Tab, disc_only_copies}));
+
+ frag_names ->
+ frag_names(Tab);
+ frag_dist ->
+ frag_dist(Tab);
+ frag_size ->
+ frag_size(ActivityId, Opaque, Tab);
+ frag_memory ->
+ frag_memory(ActivityId, Opaque, Tab);
+ _ ->
+ mnesia:table_info(ActivityId, Opaque, Frag, Item)
+ end.
+%impl_doc_include
+
+frag_size(ActivityId, Opaque, Tab) ->
+ [{F, remote_table_info(ActivityId, Opaque, F, size)} || F <- frag_names(Tab)].
+
+frag_memory(ActivityId, Opaque, Tab) ->
+ [{F, remote_table_info(ActivityId, Opaque, F, memory)} || F <- frag_names(Tab)].
+
+
+
+remote_table_info(ActivityId, Opaque, Tab, Item) ->
+ N = val({Tab, where_to_read}),
+ case rpc:call(N, mnesia, table_info, [ActivityId, Opaque, Tab, Item]) of
+ {badrpc, _} ->
+ mnesia:abort({no_exists, Tab, Item});
+ Info ->
+ Info
+ end.
+
+do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ mnesia:select(ActivityId, Opaque, Tab, MatchSpec, LockKind);
+ FH ->
+ HashState = FH#frag_state.hash_state,
+ FragNumbers =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:match_spec_to_frag_numbers(HashState, MatchSpec);
+ HashMod ->
+ HashMod:match_spec_to_frag_numbers(HashState, MatchSpec)
+ end,
+ N = FH#frag_state.n_fragments,
+ VerifyFun = fun(F) when integer(F), F >= 1, F =< N -> false;
+ (_F) -> true
+ end,
+ case catch lists:filter(VerifyFun, FragNumbers) of
+ [] ->
+ Fun = fun(Num) ->
+ Name = n_to_frag_name(Tab, Num),
+ Node = val({Name, where_to_read}),
+ mnesia:lock(ActivityId, Opaque, {table, Name}, LockKind),
+ {Name, Node}
+ end,
+ NameNodes = lists:map(Fun, FragNumbers),
+ SelectAllFun =
+ fun(PatchedMatchSpec) ->
+ Match = [mnesia:dirty_select(Name, PatchedMatchSpec)
+ || {Name, _Node} <- NameNodes],
+ lists:append(Match)
+ end,
+ case [{Name, Node} || {Name, Node} <- NameNodes, Node /= node()] of
+ [] ->
+ %% All fragments are local
+ mnesia:fun_select(ActivityId, Opaque, Tab, MatchSpec, none, '_', SelectAllFun);
+ RemoteNameNodes ->
+ SelectFun =
+ fun(PatchedMatchSpec) ->
+ Ref = make_ref(),
+ Args = [self(), Ref, RemoteNameNodes, PatchedMatchSpec],
+ Pid = spawn_link(?MODULE, local_select, Args),
+ LocalMatch = [mnesia:dirty_select(Name, PatchedMatchSpec)
+ || {Name, Node} <- NameNodes, Node == node()],
+ OldSelectFun = fun() -> SelectAllFun(PatchedMatchSpec) end,
+ local_collect(Ref, Pid, lists:append(LocalMatch), OldSelectFun)
+ end,
+ mnesia:fun_select(ActivityId, Opaque, Tab, MatchSpec, none, '_', SelectFun)
+ end;
+ BadFrags ->
+ mnesia:abort({"match_spec_to_frag_numbers: Fragment numbers out of range",
+ BadFrags, {range, 1, N}})
+ end
+ end.
+
+local_select(ReplyTo, Ref, RemoteNameNodes, MatchSpec) ->
+ RemoteNodes = mnesia_lib:uniq([Node || {_Name, Node} <- RemoteNameNodes]),
+ Args = [ReplyTo, Ref, RemoteNameNodes, MatchSpec],
+ {Replies, BadNodes} = rpc:multicall(RemoteNodes, ?MODULE, remote_select, Args),
+ case mnesia_lib:uniq(Replies) -- [ok] of
+ [] when BadNodes == [] ->
+ ReplyTo ! {local_select, Ref, ok};
+ _ when BadNodes /= [] ->
+ ReplyTo ! {local_select, Ref, {error, {node_not_running, hd(BadNodes)}}};
+ [{badrpc, {'EXIT', Reason}} | _] ->
+ ReplyTo ! {local_select, Ref, {error, Reason}};
+ [Reason | _] ->
+ ReplyTo ! {local_select, Ref, {error, Reason}}
+ end,
+ unlink(ReplyTo),
+ exit(normal).
+
+remote_select(ReplyTo, Ref, NameNodes, MatchSpec) ->
+ do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec).
+
+do_remote_select(ReplyTo, Ref, [{Name, Node} | NameNodes], MatchSpec) ->
+ if
+ Node == node() ->
+ Res = (catch {ok, mnesia:dirty_select(Name, MatchSpec)}),
+ ReplyTo ! {remote_select, Ref, Node, Res},
+ do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec);
+ true ->
+ do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec)
+ end;
+do_remote_select(_ReplyTo, _Ref, [], _MatchSpec) ->
+ ok.
+
+local_collect(Ref, Pid, LocalMatch, OldSelectFun) ->
+ receive
+ {local_select, Ref, LocalRes} ->
+ remote_collect(Ref, LocalRes, LocalMatch, OldSelectFun);
+ {'EXIT', Pid, Reason} ->
+ remote_collect(Ref, {error, Reason}, [], OldSelectFun)
+ end.
+
+remote_collect(Ref, LocalRes = ok, Acc, OldSelectFun) ->
+ receive
+ {remote_select, Ref, Node, RemoteRes} ->
+ case RemoteRes of
+ {ok, RemoteMatch} ->
+ remote_collect(Ref, LocalRes, RemoteMatch ++ Acc, OldSelectFun);
+ _ ->
+ remote_collect(Ref, {error, {node_not_running, Node}}, [], OldSelectFun)
+ end
+ after 0 ->
+ Acc
+ end;
+remote_collect(Ref, LocalRes = {error, Reason}, _Acc, OldSelectFun) ->
+ receive
+ {remote_select, Ref, _Node, _RemoteRes} ->
+ remote_collect(Ref, LocalRes, [], OldSelectFun)
+ after 0 ->
+ mnesia:abort(Reason)
+ end.
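+
+%% The distributed select above, in outline: do_select/5 spawns local_select/4,
+%% which uses rpc:multicall/4 to run remote_select/4 on every node that holds a
+%% remote fragment. Each remote node sends {remote_select, Ref, Node, Result}
+%% directly back to the originating process, local_select/4 reports
+%% {local_select, Ref, ok | {error, _}}, and local_collect/4 together with
+%% remote_collect/4 append all of it to the locally selected rows.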
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Returns a list of cstructs
+
+expand_cstruct(Cs) ->
+ expand_cstruct(Cs, create).
+
+expand_cstruct(Cs, Mode) ->
+ Tab = Cs#cstruct.name,
+ Props = Cs#cstruct.frag_properties,
+ mnesia_schema:verify({alt, [nil, list]}, mnesia_lib:etype(Props),
+ {badarg, Tab, Props}),
+ %% Verify keys
+ ValidKeys = [foreign_key, n_fragments, node_pool,
+ n_ram_copies, n_disc_copies, n_disc_only_copies,
+ hash_module, hash_state],
+ Keys = mnesia_schema:check_keys(Tab, Props, ValidKeys),
+ mnesia_schema:check_duplicates(Tab, Keys),
+
+ %% Pick fragmentation props
+ ForeignKey = mnesia_schema:pick(Tab, foreign_key, Props, undefined),
+ {ForeignKey2, N, Pool, DefaultNR, DefaultND, DefaultNDO} =
+ pick_props(Tab, Cs, ForeignKey),
+
+ %% Verify node_pool
+ BadPool = {bad_type, Tab, {node_pool, Pool}},
+ mnesia_schema:verify(list, mnesia_lib:etype(Pool), BadPool),
+ NotAtom = fun(A) when atom(A) -> false;
+ (_A) -> true
+ end,
+ mnesia_schema:verify([], [P || P <- Pool, NotAtom(P)], BadPool),
+
+ NR = mnesia_schema:pick(Tab, n_ram_copies, Props, 0),
+ ND = mnesia_schema:pick(Tab, n_disc_copies, Props, 0),
+ NDO = mnesia_schema:pick(Tab, n_disc_only_copies, Props, 0),
+
+ PosInt = fun(I) when integer(I), I >= 0 -> true;
+ (_I) -> false
+ end,
+ mnesia_schema:verify(true, PosInt(NR),
+ {bad_type, Tab, {n_ram_copies, NR}}),
+ mnesia_schema:verify(true, PosInt(ND),
+ {bad_type, Tab, {n_disc_copies, ND}}),
+ mnesia_schema:verify(true, PosInt(NDO),
+ {bad_type, Tab, {n_disc_only_copies, NDO}}),
+
+ %% Verify n_fragments
+ Cs2 = verify_n_fragments(N, Cs, Mode),
+
+ %% Verify hash callback
+ HashMod = mnesia_schema:pick(Tab, hash_module, Props, ?DEFAULT_HASH_MOD),
+ HashState = mnesia_schema:pick(Tab, hash_state, Props, undefined),
+ HashState2 = HashMod:init_state(Tab, HashState), %% BUGBUG: Catch?
+
+ FH = #frag_state{foreign_key = ForeignKey2,
+ n_fragments = 1,
+ hash_module = HashMod,
+ hash_state = HashState2},
+ if
+ NR == 0, ND == 0, NDO == 0 ->
+ do_expand_cstruct(Cs2, FH, N, Pool, DefaultNR, DefaultND, DefaultNDO, Mode);
+ true ->
+ do_expand_cstruct(Cs2, FH, N, Pool, NR, ND, NDO, Mode)
+ end.
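+
+%% Illustrative example (table and node names are hypothetical): a cstruct
+%% whose frag_properties is
+%%   [{n_fragments, 4}, {node_pool, [a@host, b@host]}, {n_ram_copies, 2}]
+%% passes the checks above and is expanded into one cstruct per fragment,
+%% with the replicas of each fragment spread over the node_pool by
+%% expand_frag_cstructs/9 below.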
+
+do_expand_cstruct(Cs, FH, N, Pool, NR, ND, NDO, Mode) ->
+ Tab = Cs#cstruct.name,
+
+ LC = Cs#cstruct.local_content,
+ mnesia_schema:verify(false, LC,
+ {combine_error, Tab, {local_content, LC}}),
+
+ Snmp = Cs#cstruct.snmp,
+ mnesia_schema:verify([], Snmp,
+ {combine_error, Tab, {snmp, Snmp}}),
+
+ %% Add empty fragments
+ CommonProps = [{base_table, Tab}],
+ Cs2 = Cs#cstruct{frag_properties = lists:sort(CommonProps)},
+ expand_frag_cstructs(N, NR, ND, NDO, Cs2, Pool, Pool, FH, Mode).
+
+verify_n_fragments(N, Cs, Mode) when integer(N), N >= 1 ->
+ case Mode of
+ create ->
+ Cs#cstruct{ram_copies = [],
+ disc_copies = [],
+ disc_only_copies = []};
+ activate ->
+ Reason = {combine_error, Cs#cstruct.name, {n_fragments, N}},
+ mnesia_schema:verify(1, N, Reason),
+ Cs
+ end;
+verify_n_fragments(N, Cs, _Mode) ->
+ mnesia:abort({bad_type, Cs#cstruct.name, {n_fragments, N}}).
+
+pick_props(Tab, Cs, {ForeignTab, Attr}) ->
+ mnesia_schema:verify(true, ForeignTab /= Tab,
+ {combine_error, Tab, {ForeignTab, Attr}}),
+ Props = Cs#cstruct.frag_properties,
+ Attrs = Cs#cstruct.attributes,
+
+ ForeignKey = lookup_prop(ForeignTab, foreign_key),
+ ForeignN = lookup_prop(ForeignTab, n_fragments),
+ ForeignPool = lookup_prop(ForeignTab, node_pool),
+ N = mnesia_schema:pick(Tab, n_fragments, Props, ForeignN),
+ Pool = mnesia_schema:pick(Tab, node_pool, Props, ForeignPool),
+
+ mnesia_schema:verify(ForeignN, N,
+ {combine_error, Tab, {n_fragments, N},
+ ForeignTab, {n_fragments, ForeignN}}),
+
+ mnesia_schema:verify(ForeignPool, Pool,
+ {combine_error, Tab, {node_pool, Pool},
+ ForeignTab, {node_pool, ForeignPool}}),
+
+ mnesia_schema:verify(undefined, ForeignKey,
+ {combine_error, Tab,
+ "Multiple levels of foreign_key dependencies",
+ {ForeignTab, Attr}, ForeignKey}),
+
+ Key = {ForeignTab, mnesia_schema:attr_to_pos(Attr, Attrs)},
+ DefaultNR = length(val({ForeignTab, ram_copies})),
+ DefaultND = length(val({ForeignTab, disc_copies})),
+ DefaultNDO = length(val({ForeignTab, disc_only_copies})),
+ {Key, N, Pool, DefaultNR, DefaultND, DefaultNDO};
+pick_props(Tab, Cs, undefined) ->
+ Props = Cs#cstruct.frag_properties,
+ DefaultN = 1,
+ DefaultPool = mnesia:system_info(db_nodes),
+ N = mnesia_schema:pick(Tab, n_fragments, Props, DefaultN),
+ Pool = mnesia_schema:pick(Tab, node_pool, Props, DefaultPool),
+ DefaultNR = 1,
+ DefaultND = 0,
+ DefaultNDO = 0,
+ {undefined, N, Pool, DefaultNR, DefaultND, DefaultNDO};
+pick_props(Tab, _Cs, BadKey) ->
+ mnesia:abort({bad_type, Tab, {foreign_key, BadKey}}).
+
+expand_frag_cstructs(N, NR, ND, NDO, CommonCs, Dist, Pool, FH, Mode)
+ when N > 1, Mode == create ->
+ Frag = n_to_frag_name(CommonCs#cstruct.name, N),
+ Cs = CommonCs#cstruct{name = Frag},
+ {Cs2, RevModDist, RestDist} = set_frag_nodes(NR, ND, NDO, Cs, Dist, []),
+ ModDist = lists:reverse(RevModDist),
+ Dist2 = rearrange_dist(Cs, ModDist, RestDist, Pool),
+ %% Adjusts backwards, but it doesn't matter.
+ {FH2, _FromFrags, _AdditionalWriteFrags} = adjust_before_split(FH),
+ CsList = expand_frag_cstructs(N - 1, NR, ND, NDO, CommonCs, Dist2, Pool, FH2, Mode),
+ [Cs2 | CsList];
+expand_frag_cstructs(1, NR, ND, NDO, CommonCs, Dist, Pool, FH, Mode) ->
+ BaseProps = CommonCs#cstruct.frag_properties ++
+ [{foreign_key, FH#frag_state.foreign_key},
+ {hash_module, FH#frag_state.hash_module},
+ {hash_state, FH#frag_state.hash_state},
+ {n_fragments, FH#frag_state.n_fragments},
+ {node_pool, Pool}
+ ],
+ BaseCs = CommonCs#cstruct{frag_properties = lists:sort(BaseProps)},
+ case Mode of
+ activate ->
+ [BaseCs];
+ create ->
+ {BaseCs2, _, _} = set_frag_nodes(NR, ND, NDO, BaseCs, Dist, []),
+ [BaseCs2]
+ end.
+
+set_frag_nodes(NR, ND, NDO, Cs, [Head | Tail], Acc) when NR > 0 ->
+ Pos = #cstruct.ram_copies,
+ {Cs2, Head2} = set_frag_node(Cs, Pos, Head),
+ set_frag_nodes(NR - 1, ND, NDO, Cs2, Tail, [Head2 | Acc]);
+set_frag_nodes(NR, ND, NDO, Cs, [Head | Tail], Acc) when ND > 0 ->
+ Pos = #cstruct.disc_copies,
+ {Cs2, Head2} = set_frag_node(Cs, Pos, Head),
+ set_frag_nodes(NR, ND - 1, NDO, Cs2, Tail, [Head2 | Acc]);
+set_frag_nodes(NR, ND, NDO, Cs, [Head | Tail], Acc) when NDO > 0 ->
+ Pos = #cstruct.disc_only_copies,
+ {Cs2, Head2} = set_frag_node(Cs, Pos, Head),
+ set_frag_nodes(NR, ND, NDO - 1, Cs2, Tail, [Head2 | Acc]);
+set_frag_nodes(0, 0, 0, Cs, RestDist, ModDist) ->
+ {Cs, ModDist, RestDist};
+set_frag_nodes(_, _, _, Cs, [], _) ->
+ mnesia:abort({combine_error, Cs#cstruct.name, "Too few nodes in node_pool"}).
+
+set_frag_node(Cs, Pos, Head) ->
+ Ns = element(Pos, Cs),
+ {Node, Count2} =
+ case Head of
+ {N, Count} when atom(N), integer(Count), Count >= 0 ->
+ {N, Count + 1};
+ N when atom(N) ->
+ {N, 1};
+ BadNode ->
+ mnesia:abort({bad_type, Cs#cstruct.name, BadNode})
+ end,
+ Cs2 = setelement(Pos, Cs, [Node | Ns]),
+ {Cs2, {Node, Count2}}.
+
+rearrange_dist(Cs, [{Node, Count} | ModDist], Dist, Pool) ->
+ Dist2 = insert_dist(Cs, Node, Count, Dist, Pool),
+ rearrange_dist(Cs, ModDist, Dist2, Pool);
+rearrange_dist(_Cs, [], Dist, _) ->
+ Dist.
+
+insert_dist(Cs, Node, Count, [Head | Tail], Pool) ->
+ case Head of
+ {Node2, Count2} when atom(Node2), integer(Count2), Count2 >= 0 ->
+ case node_diff(Node, Count, Node2, Count2, Pool) of
+ less ->
+ [{Node, Count}, Head | Tail];
+ greater ->
+ [Head | insert_dist(Cs, Node, Count, Tail, Pool)]
+ end;
+ Node2 when atom(Node2) ->
+ insert_dist(Cs, Node, Count, [{Node2, 0} | Tail], Pool);
+ BadNode ->
+ mnesia:abort({bad_type, Cs#cstruct.name, BadNode})
+ end;
+insert_dist(_Cs, Node, Count, [], _Pool) ->
+ [{Node, Count}];
+insert_dist(_Cs, _Node, _Count, Dist, _Pool) ->
+ mnesia:abort({bad_type, Dist}).
+
+node_diff(_Node, Count, _Node2, Count2, _Pool) when Count < Count2 ->
+ less;
+node_diff(Node, Count, Node2, Count2, Pool) when Count == Count2 ->
+ Pos = list_pos(Node, Pool, 1),
+ Pos2 = list_pos(Node2, Pool, 1),
+ if
+ Pos < Pos2 ->
+ less;
+ Pos > Pos2 ->
+ greater
+ end;
+node_diff(_Node, Count, _Node2, Count2, _Pool) when Count > Count2 ->
+ greater.
+
+%% Returns position of element in list
+list_pos(H, [H | _T], Pos) ->
+ Pos;
+list_pos(E, [_H | T], Pos) ->
+ list_pos(E, T, Pos + 1).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Switch function for changing table fragmentation
+%%
+%% Returns a list of lists of schema ops
+
+change_table_frag(Tab, {activate, FragProps}) ->
+ make_activate(Tab, FragProps);
+change_table_frag(Tab, deactivate) ->
+ make_deactivate(Tab);
+change_table_frag(Tab, {add_frag, SortedNodes}) ->
+ make_multi_add_frag(Tab, SortedNodes);
+change_table_frag(Tab, del_frag) ->
+ make_multi_del_frag(Tab);
+change_table_frag(Tab, {add_node, Node}) ->
+ make_multi_add_node(Tab, Node);
+change_table_frag(Tab, {del_node, Node}) ->
+ make_multi_del_node(Tab, Node);
+change_table_frag(Tab, Change) ->
+ mnesia:abort({bad_type, Tab, Change}).
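+
+%% Illustrative usage sketch (Tab, SortedNodes and Node are placeholders):
+%% these change requests normally arrive via mnesia:change_table_frag/2,
+%% the public entry point, e.g.
+%%
+%%   mnesia:change_table_frag(Tab, {activate, []}),
+%%   mnesia:change_table_frag(Tab, {add_frag, SortedNodes}),
+%%   mnesia:change_table_frag(Tab, {add_node, Node}),
+%%   mnesia:change_table_frag(Tab, deactivate)
+%%
+%% Each clause above returns a list of lists of schema ops that are applied
+%% within a schema transaction.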
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Turn a normal table into a fragmented table
+%%
+%% The storage type must be the same on all nodes
+
+make_activate(Tab, Props) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ case Cs#cstruct.frag_properties of
+ [] ->
+ Cs2 = Cs#cstruct{frag_properties = Props},
+ [Cs3] = expand_cstruct(Cs2, activate),
+ TabDef = mnesia_schema:cs2list(Cs3),
+ Op = {op, change_table_frag, activate, TabDef},
+ [[Op]];
+ BadProps ->
+ mnesia:abort({already_exists, Tab, {frag_properties, BadProps}})
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Turn a table into a normal defragmented table
+
+make_deactivate(Tab) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ Foreigners = lookup_foreigners(Tab),
+ BaseTab = lookup_prop(Tab, base_table),
+ FH = lookup_frag_hash(Tab),
+ if
+ BaseTab /= Tab ->
+ mnesia:abort({combine_error, Tab, "Not a base table"});
+ Foreigners /= [] ->
+ mnesia:abort({combine_error, Tab, "Too many foreigners", Foreigners});
+ FH#frag_state.n_fragments > 1 ->
+ mnesia:abort({combine_error, Tab, "Too many fragments"});
+ true ->
+ Cs2 = Cs#cstruct{frag_properties = []},
+ TabDef = mnesia_schema:cs2list(Cs2),
+ Op = {op, change_table_frag, deactivate, TabDef},
+ [[Op]]
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add a fragment to a fragmented table and fill it with half of
+%% the records from one of the old fragments
+
+make_multi_add_frag(Tab, SortedNs) when list(SortedNs) ->
+ verify_multi(Tab),
+ Ops = make_add_frag(Tab, SortedNs),
+
+ %% Propagate to foreigners
+ MoreOps = [make_add_frag(T, SortedNs) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps];
+make_multi_add_frag(Tab, SortedNs) ->
+ mnesia:abort({bad_type, Tab, SortedNs}).
+
+verify_multi(Tab) ->
+ FH = lookup_frag_hash(Tab),
+ ForeignKey = FH#frag_state.foreign_key,
+ mnesia_schema:verify(undefined, ForeignKey,
+ {combine_error, Tab,
+ "Op only allowed via foreign table",
+ {foreign_key, ForeignKey}}).
+
+make_frag_names_and_acquire_locks(Tab, N, FragIndecies, DoNotLockN) ->
+ mnesia_schema:get_tid_ts_and_lock(Tab, write),
+ Fun = fun(Index, FN) ->
+ if
+ DoNotLockN == true, Index == N ->
+ Name = n_to_frag_name(Tab, Index),
+ setelement(Index, FN, Name);
+ true ->
+ Name = n_to_frag_name(Tab, Index),
+ mnesia_schema:get_tid_ts_and_lock(Name, write),
+			  setelement(Index, FN, Name)
+ end
+ end,
+ FragNames = erlang:make_tuple(N, undefined),
+ lists:foldl(Fun, FragNames, FragIndecies).
+
+make_add_frag(Tab, SortedNs) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ FH = lookup_frag_hash(Tab),
+ {FH2, FromIndecies, WriteIndecies} = adjust_before_split(FH),
+ N = FH2#frag_state.n_fragments,
+ FragNames = make_frag_names_and_acquire_locks(Tab, N, WriteIndecies, true),
+ NewFrag = element(N, FragNames),
+
+ NR = length(Cs#cstruct.ram_copies),
+ ND = length(Cs#cstruct.disc_copies),
+ NDO = length(Cs#cstruct.disc_only_copies),
+ NewCs = Cs#cstruct{name = NewFrag,
+ frag_properties = [{base_table, Tab}],
+ ram_copies = [],
+ disc_copies = [],
+ disc_only_copies = []},
+ {NewCs2, _, _} = set_frag_nodes(NR, ND, NDO, NewCs, SortedNs, []),
+ [NewOp] = mnesia_schema:make_create_table(NewCs2),
+
+ SplitOps = split(Tab, FH2, FromIndecies, FragNames, []),
+
+ Cs2 = replace_frag_hash(Cs, FH2),
+ TabDef = mnesia_schema:cs2list(Cs2),
+ BaseOp = {op, change_table_frag, {add_frag, SortedNs}, TabDef},
+
+ [BaseOp, NewOp | SplitOps].
+
+replace_frag_hash(Cs, FH) when record(FH, frag_state) ->
+ Fun = fun(Prop) ->
+ case Prop of
+ {n_fragments, _} ->
+ {true, {n_fragments, FH#frag_state.n_fragments}};
+ {hash_module, _} ->
+ {true, {hash_module, FH#frag_state.hash_module}};
+ {hash_state, _} ->
+ {true, {hash_state, FH#frag_state.hash_state}};
+ {next_n_to_split, _} ->
+ false;
+ {n_doubles, _} ->
+ false;
+ _ ->
+ true
+ end
+ end,
+ Props = lists:zf(Fun, Cs#cstruct.frag_properties),
+ Cs#cstruct{frag_properties = Props}.
+
+%% Adjust table info before split
+adjust_before_split(FH) ->
+ HashState = FH#frag_state.hash_state,
+ {HashState2, FromFrags, AdditionalWriteFrags} =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:add_frag(HashState);
+ HashMod ->
+ HashMod:add_frag(HashState)
+ end,
+ N = FH#frag_state.n_fragments + 1,
+ FromFrags2 = (catch lists:sort(FromFrags)),
+ UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))),
+ VerifyFun = fun(F) when integer(F), F >= 1, F =< N -> false;
+ (_F) -> true
+ end,
+ case catch lists:filter(VerifyFun, UnionFrags) of
+ [] ->
+ FH2 = FH#frag_state{n_fragments = N,
+ hash_state = HashState2},
+ {FH2, FromFrags2, UnionFrags};
+ BadFrags ->
+ mnesia:abort({"add_frag: Fragment numbers out of range",
+ BadFrags, {range, 1, N}})
+ end.
+
+split(Tab, FH, [SplitN | SplitNs], FragNames, Ops) ->
+ SplitFrag = element(SplitN, FragNames),
+ Pat = mnesia:table_info(SplitFrag, wild_pattern),
+ {_Mod, Tid, Ts} = mnesia_schema:get_tid_ts_and_lock(Tab, none),
+ Recs = mnesia:match_object(Tid, Ts, SplitFrag, Pat, read),
+ Ops2 = do_split(FH, SplitN, FragNames, Recs, Ops),
+ split(Tab, FH, SplitNs, FragNames, Ops2);
+split(_Tab, _FH, [], _FragNames, Ops) ->
+ Ops.
+
+%% Perform the split of the table
+do_split(FH, OldN, FragNames, [Rec | Recs], Ops) ->
+ Pos = key_pos(FH),
+ HashKey = element(Pos, Rec),
+ case key_to_n(FH, HashKey) of
+ NewN when NewN == OldN ->
+ %% Keep record in the same fragment. No need to move it.
+ do_split(FH, OldN, FragNames, Recs, Ops);
+ NewN ->
+ case element(NewN, FragNames) of
+ NewFrag when NewFrag /= undefined ->
+ OldFrag = element(OldN, FragNames),
+ Key = element(2, Rec),
+ NewOid = {NewFrag, Key},
+ OldOid = {OldFrag, Key},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ {op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
+ do_split(FH, OldN, FragNames, Recs, Ops2);
+ _NewFrag ->
+		    %% Tried to move the record to a fragment that is not locked
+ mnesia:abort({"add_frag: Fragment not locked", NewN})
+ end
+ end;
+do_split(_FH, _OldN, _FragNames, [], Ops) ->
+ Ops.
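+
+%% Note on the split above: for every record of the fragment being split,
+%% key_to_n/2 is re-evaluated with the new hash state; only records whose
+%% fragment number changed get a write op for the new fragment plus a
+%% delete op for the old one, so with the default hash module roughly half
+%% of the split fragment's records are expected to move.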
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete a fragment from a fragmented table
+%% and merge its records with another fragment
+
+make_multi_del_frag(Tab) ->
+ verify_multi(Tab),
+ Ops = make_del_frag(Tab),
+
+ %% Propagate to foreigners
+ MoreOps = [make_del_frag(T) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps].
+
+make_del_frag(Tab) ->
+ FH = lookup_frag_hash(Tab),
+ case FH#frag_state.n_fragments of
+ N when N > 1 ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ {FH2, FromIndecies, WriteIndecies} = adjust_before_merge(FH),
+ FragNames = make_frag_names_and_acquire_locks(Tab, N, WriteIndecies, false),
+
+ MergeOps = merge(Tab, FH2, FromIndecies, FragNames, []),
+ LastFrag = element(N, FragNames),
+ [LastOp] = mnesia_schema:make_delete_table(LastFrag, single_frag),
+ Cs2 = replace_frag_hash(Cs, FH2),
+ TabDef = mnesia_schema:cs2list(Cs2),
+ BaseOp = {op, change_table_frag, del_frag, TabDef},
+ [BaseOp, LastOp | MergeOps];
+ _ ->
+ %% Cannot remove the last fragment
+ mnesia:abort({no_exists, Tab})
+ end.
+
+%% Adjust tab info before merge
+adjust_before_merge(FH) ->
+ HashState = FH#frag_state.hash_state,
+ {HashState2, FromFrags, AdditionalWriteFrags} =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:del_frag(HashState);
+ HashMod ->
+ HashMod:del_frag(HashState)
+ end,
+ N = FH#frag_state.n_fragments,
+ FromFrags2 = (catch lists:sort(FromFrags)),
+ UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))),
+ VerifyFun = fun(F) when integer(F), F >= 1, F =< N -> false;
+ (_F) -> true
+ end,
+ case catch lists:filter(VerifyFun, UnionFrags) of
+ [] ->
+ case lists:member(N, FromFrags2) of
+ true ->
+ FH2 = FH#frag_state{n_fragments = N - 1,
+ hash_state = HashState2},
+ {FH2, FromFrags2, UnionFrags};
+ false ->
+ mnesia:abort({"del_frag: Last fragment number not included", N})
+ end;
+ BadFrags ->
+ mnesia:abort({"del_frag: Fragment numbers out of range",
+ BadFrags, {range, 1, N}})
+ end.
+
+merge(Tab, FH, [FromN | FromNs], FragNames, Ops) ->
+ FromFrag = element(FromN, FragNames),
+ Pat = mnesia:table_info(FromFrag, wild_pattern),
+ {_Mod, Tid, Ts} = mnesia_schema:get_tid_ts_and_lock(Tab, none),
+ Recs = mnesia:match_object(Tid, Ts, FromFrag, Pat, read),
+ Ops2 = do_merge(FH, FromN, FragNames, Recs, Ops),
+ merge(Tab, FH, FromNs, FragNames, Ops2);
+merge(_Tab, _FH, [], _FragNames, Ops) ->
+ Ops.
+
+%% Perform the merge of the table
+do_merge(FH, OldN, FragNames, [Rec | Recs], Ops) ->
+ Pos = key_pos(FH),
+ LastN = FH#frag_state.n_fragments + 1,
+ HashKey = element(Pos, Rec),
+ case key_to_n(FH, HashKey) of
+ NewN when NewN == LastN ->
+ %% Tried to leave a record in the fragment that is to be deleted
+ mnesia:abort({"del_frag: Fragment number out of range",
+ NewN, {range, 1, LastN}});
+ NewN when NewN == OldN ->
+ %% Keep record in the same fragment. No need to move it.
+ do_merge(FH, OldN, FragNames, Recs, Ops);
+ NewN when OldN == LastN ->
+ %% Move record from the fragment that is to be deleted
+ %% No need to create a delete op for each record.
+ case element(NewN, FragNames) of
+ NewFrag when NewFrag /= undefined ->
+ Key = element(2, Rec),
+ NewOid = {NewFrag, Key},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}} | Ops],
+ do_merge(FH, OldN, FragNames, Recs, Ops2);
+ _NewFrag ->
+		    %% Tried to move the record to a fragment that is not locked
+ mnesia:abort({"del_frag: Fragment not locked", NewN})
+ end;
+ NewN ->
+ case element(NewN, FragNames) of
+ NewFrag when NewFrag /= undefined ->
+ OldFrag = element(OldN, FragNames),
+ Key = element(2, Rec),
+ NewOid = {NewFrag, Key},
+ OldOid = {OldFrag, Key},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ {op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
+ do_merge(FH, OldN, FragNames, Recs, Ops2);
+ _NewFrag ->
+		    %% Tried to move the record to a fragment that is not locked
+ mnesia:abort({"del_frag: Fragment not locked", NewN})
+ end
+ end;
+do_merge(_FH, _OldN, _FragNames, [], Ops) ->
+    Ops.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add a node to the node pool of a fragmented table
+
+make_multi_add_node(Tab, Node) ->
+ verify_multi(Tab),
+ Ops = make_add_node(Tab, Node),
+
+ %% Propagate to foreigners
+ MoreOps = [make_add_node(T, Node) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps].
+
+make_add_node(Tab, Node) when atom(Node) ->
+ Pool = lookup_prop(Tab, node_pool),
+ case lists:member(Node, Pool) of
+ false ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ Pool2 = Pool ++ [Node],
+ Props = Cs#cstruct.frag_properties,
+ Props2 = lists:keyreplace(node_pool, 1, Props, {node_pool, Pool2}),
+ Cs2 = Cs#cstruct{frag_properties = Props2},
+ TabDef = mnesia_schema:cs2list(Cs2),
+ Op = {op, change_table_frag, {add_node, Node}, TabDef},
+ [Op];
+ true ->
+ mnesia:abort({already_exists, Tab, Node})
+ end;
+make_add_node(Tab, Node) ->
+ mnesia:abort({bad_type, Tab, Node}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete a node from the node pool of a fragmented table
+
+make_multi_del_node(Tab, Node) ->
+ verify_multi(Tab),
+ Ops = make_del_node(Tab, Node),
+
+ %% Propagate to foreigners
+ MoreOps = [make_del_node(T, Node) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps].
+
+make_del_node(Tab, Node) when atom(Node) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ Pool = lookup_prop(Tab, node_pool),
+ case lists:member(Node, Pool) of
+ true ->
+ Pool2 = Pool -- [Node],
+ Props = lists:keyreplace(node_pool, 1, Cs#cstruct.frag_properties, {node_pool, Pool2}),
+ Cs2 = Cs#cstruct{frag_properties = Props},
+ TabDef = mnesia_schema:cs2list(Cs2),
+ Op = {op, change_table_frag, {del_node, Node}, TabDef},
+ [Op];
+ false ->
+ mnesia:abort({no_exists, Tab, Node})
+ end;
+make_del_node(Tab, Node) ->
+ mnesia:abort({bad_type, Tab, Node}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Special case used to remove all references to a node during
+%% mnesia:del_table_copy(schema, Node)
+
+remove_node(Node, Cs) ->
+ Tab = Cs#cstruct.name,
+ case is_top_frag(Tab) of
+ false ->
+ {Cs, false};
+ true ->
+ Pool = lookup_prop(Tab, node_pool),
+ case lists:member(Node, Pool) of
+ true ->
+ Pool2 = Pool -- [Node],
+ Props = lists:keyreplace(node_pool, 1,
+ Cs#cstruct.frag_properties,
+ {node_pool, Pool2}),
+ {Cs#cstruct{frag_properties = Props}, true};
+ false ->
+ {Cs, false}
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Helpers
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+set_frag_hash(Tab, Props) ->
+ case props_to_frag_hash(Tab, Props) of
+ FH when record(FH, frag_state) ->
+ mnesia_lib:set({Tab, frag_hash}, FH);
+ no_hash ->
+ mnesia_lib:unset({Tab, frag_hash})
+ end.
+
+props_to_frag_hash(_Tab, []) ->
+ no_hash;
+props_to_frag_hash(Tab, Props) ->
+ case mnesia_schema:pick(Tab, base_table, Props, undefined) of
+ T when T == Tab ->
+ Foreign = mnesia_schema:pick(Tab, foreign_key, Props, must),
+ N = mnesia_schema:pick(Tab, n_fragments, Props, must),
+
+ case mnesia_schema:pick(Tab, hash_module, Props, undefined) of
+ undefined ->
+ Split = mnesia_schema:pick(Tab, next_n_to_split, Props, must),
+ Doubles = mnesia_schema:pick(Tab, n_doubles, Props, must),
+		    FH = {frag_hash, Foreign, N, Split, Doubles},
+		    %% Old style. Kept for backwards compatibility.
+		    HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
+		    #frag_state{foreign_key = Foreign,
+				n_fragments = N,
+				hash_module = ?OLD_HASH_MOD,
+				hash_state = HashState};
+		HashMod ->
+		    HashState = mnesia_schema:pick(Tab, hash_state, Props, must),
+		    #frag_state{foreign_key = Foreign,
+				n_fragments = N,
+				hash_module = HashMod,
+				hash_state = HashState}
+ end;
+ _ ->
+ no_hash
+ end.
+
+lookup_prop(Tab, Prop) ->
+ Props = val({Tab, frag_properties}),
+ case lists:keysearch(Prop, 1, Props) of
+ {value, {Prop, Val}} ->
+ Val;
+ false ->
+ mnesia:abort({no_exists, Tab, Prop, {frag_properties, Props}})
+ end.
+
+lookup_frag_hash(Tab) ->
+ case ?catch_val({Tab, frag_hash}) of
+ FH when record(FH, frag_state) ->
+ FH;
+ {frag_hash, K, N, _S, _D} = FH ->
+ %% Old style. Kept for backwards compatibility.
+ HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
+ #frag_state{foreign_key = K,
+ n_fragments = N,
+ hash_module = ?OLD_HASH_MOD,
+ hash_state = HashState};
+ {'EXIT', _} ->
+ mnesia:abort({no_exists, Tab, frag_properties, frag_hash})
+ end.
+
+is_top_frag(Tab) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ false;
+ _ ->
+ [] == lookup_foreigners(Tab)
+ end.
+
+%% Returns a list of tables
+lookup_foreigners(Tab) ->
+ %% First field in HashPat is either frag_hash or frag_state
+ HashPat = {'_', {Tab, '_'}, '_', '_', '_'},
+ [T || [T] <- ?ets_match(mnesia_gvar, {{'$1', frag_hash}, HashPat})].
+
+%% Returns name of fragment table
+record_to_frag_name(Tab, Rec) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ Tab;
+ FH ->
+ Pos = key_pos(FH),
+ Key = element(Pos, Rec),
+ N = key_to_n(FH, Key),
+ n_to_frag_name(Tab, N)
+ end.
+
+key_pos(FH) ->
+ case FH#frag_state.foreign_key of
+ undefined ->
+ 2;
+ {_ForeignTab, Pos} ->
+ Pos
+ end.
+
+%% Returns name of fragment table
+key_to_frag_name({BaseTab, _} = Tab, Key) ->
+ N = key_to_frag_number(Tab, Key),
+ n_to_frag_name(BaseTab, N);
+key_to_frag_name(Tab, Key) ->
+ N = key_to_frag_number(Tab, Key),
+ n_to_frag_name(Tab, N).
+
+%% Returns name of fragment table
+n_to_frag_name(Tab, 1) ->
+ Tab;
+n_to_frag_name(Tab, N) when atom(Tab), integer(N) ->
+ list_to_atom(atom_to_list(Tab) ++ "_frag" ++ integer_to_list(N));
+n_to_frag_name(Tab, N) ->
+ mnesia:abort({bad_type, Tab, N}).
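+
+%% For example (with a hypothetical table name): n_to_frag_name(person, 1)
+%% returns person itself, while n_to_frag_name(person, 5) returns the atom
+%% 'person_frag5'.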
+
+%% Returns fragment number
+key_to_frag_number({Tab, ForeignKey}, _Key) ->
+ FH = val({Tab, frag_hash}),
+ case FH#frag_state.foreign_key of
+ {_ForeignTab, _Pos} ->
+ key_to_n(FH, ForeignKey);
+ undefined ->
+ mnesia:abort({combine_error, Tab, frag_properties,
+ {foreign_key, undefined}})
+ end;
+key_to_frag_number(Tab, Key) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ 1;
+ FH ->
+ key_to_n(FH, Key)
+ end.
+
+%% Returns fragment number
+key_to_n(FH, Key) ->
+ HashState = FH#frag_state.hash_state,
+ N =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:key_to_frag_number(HashState, Key);
+ HashMod ->
+ HashMod:key_to_frag_number(HashState, Key)
+ end,
+ if
+ integer(N), N >= 1, N =< FH#frag_state.n_fragments ->
+ N;
+ true ->
+ mnesia:abort({"key_to_frag_number: Fragment number out of range",
+ N, {range, 1, FH#frag_state.n_fragments}})
+ end.
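+
+%% E.g. with the default hash module and n_fragments = 4, key_to_n/2 maps
+%% every key to a number in 1..4; a result outside that range indicates a
+%% broken hash callback and makes the operation abort.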
+
+%% Returns a list of fragment table names
+frag_names(Tab) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ [Tab];
+ FH ->
+ N = FH#frag_state.n_fragments,
+ frag_names(Tab, N, [])
+ end.
+
+frag_names(Tab, 1, Acc) ->
+ [Tab | Acc];
+frag_names(Tab, N, Acc) ->
+ Frag = n_to_frag_name(Tab, N),
+ frag_names(Tab, N - 1, [Frag | Acc]).
+
+%% Returns a list of {Node, FragCount} tuples
+%% sorted on FragCounts
+frag_dist(Tab) ->
+ Pool = lookup_prop(Tab, node_pool),
+ Dist = [{good, Node, 0} || Node <- Pool],
+ Dist2 = count_frag(frag_names(Tab), Dist),
+ sort_dist(Dist2).
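+
+%% Illustrative example (hypothetical nodes): with node_pool = [a, b] where
+%% a currently holds two fragment replicas and b holds one, frag_dist/1
+%% returns [{b, 1}, {a, 2}], i.e. the least loaded pool node first; replica
+%% nodes outside the pool are tagged as bad and end up last.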
+
+count_frag([Frag | Frags], Dist) ->
+ Dist2 = incr_nodes(val({Frag, ram_copies}), Dist),
+ Dist3 = incr_nodes(val({Frag, disc_copies}), Dist2),
+ Dist4 = incr_nodes(val({Frag, disc_only_copies}), Dist3),
+ count_frag(Frags, Dist4);
+count_frag([], Dist) ->
+ Dist.
+
+incr_nodes([Node | Nodes], Dist) ->
+ Dist2 = incr_node(Node, Dist),
+ incr_nodes(Nodes, Dist2);
+incr_nodes([], Dist) ->
+ Dist.
+
+incr_node(Node, [{Kind, Node, Count} | Tail]) ->
+ [{Kind, Node, Count + 1} | Tail];
+incr_node(Node, [Head | Tail]) ->
+ [Head | incr_node(Node, Tail)];
+incr_node(Node, []) ->
+ [{bad, Node, 1}].
+
+%% Sorts dist in increasing count order (least loaded node first)
+sort_dist(Dist) ->
+ Dist2 = deep_dist(Dist, []),
+ Dist3 = lists:keysort(1, Dist2),
+ shallow_dist(Dist3).
+
+deep_dist([Head | Tail], Deep) ->
+ {Kind, _Node, Count} = Head,
+ {Tag, Same, Other} = pick_count(Kind, Count, [Head | Tail]),
+ deep_dist(Other, [{Tag, Same} | Deep]);
+deep_dist([], Deep) ->
+ Deep.
+
+pick_count(Kind, Count, [{Kind2, Node2, Count2} | Tail]) ->
+ Head = {Node2, Count2},
+ {_, Same, Other} = pick_count(Kind, Count, Tail),
+ if
+ Kind == bad ->
+ {bad, [Head | Same], Other};
+ Kind2 == bad ->
+ {Count, Same, [{Kind2, Node2, Count2} | Other]};
+ Count == Count2 ->
+ {Count, [Head | Same], Other};
+ true ->
+ {Count, Same, [{Kind2, Node2, Count2} | Other]}
+ end;
+pick_count(_Kind, Count, []) ->
+ {Count, [], []}.
+
+shallow_dist([{_Tag, Shallow} | Deep]) ->
+ Shallow ++ shallow_dist(Deep);
+shallow_dist([]) ->
+ [].
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_hash.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_hash.erl
new file mode 100644
index 0000000000..19b97f8d61
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_hash.erl
@@ -0,0 +1,118 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_frag_hash.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+%%%----------------------------------------------------------------------
+%%% Purpose : Implements hashing functionality for fragmented tables
+%%%----------------------------------------------------------------------
+
+%header_doc_include
+-module(mnesia_frag_hash).
+-behaviour(mnesia_frag_hash).
+
+%% Fragmented Table Hashing callback functions
+-export([
+ init_state/2,
+ add_frag/1,
+ del_frag/1,
+ key_to_frag_number/2,
+ match_spec_to_frag_numbers/2
+ ]).
+
+%header_doc_include
+
+%impl_doc_include
+-record(hash_state, {n_fragments, next_n_to_split, n_doubles}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+init_state(_Tab, State) when State == undefined ->
+ #hash_state{n_fragments = 1,
+ next_n_to_split = 1,
+ n_doubles = 0}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_frag(State) when record(State, hash_state) ->
+ SplitN = State#hash_state.next_n_to_split,
+ P = SplitN + 1,
+ L = State#hash_state.n_doubles,
+ NewN = State#hash_state.n_fragments + 1,
+ State2 = case trunc(math:pow(2, L)) + 1 of
+ P2 when P2 == P ->
+ State#hash_state{n_fragments = NewN,
+ n_doubles = L + 1,
+ next_n_to_split = 1};
+ _ ->
+ State#hash_state{n_fragments = NewN,
+ next_n_to_split = P}
+ end,
+ {State2, [SplitN], [NewN]}.
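+
+%% Worked example: starting from the initial state returned by init_state/2,
+%% i.e. #hash_state{n_fragments = 1, next_n_to_split = 1, n_doubles = 0},
+%% three successive add_frag/1 calls give
+%%   {#hash_state{n_fragments = 2, next_n_to_split = 1, n_doubles = 1}, [1], [2]}
+%%   {#hash_state{n_fragments = 3, next_n_to_split = 2, n_doubles = 1}, [1], [3]}
+%%   {#hash_state{n_fragments = 4, next_n_to_split = 1, n_doubles = 2}, [2], [4]}
+%% i.e. classic linear hashing: fragment SplitN is split and part of its
+%% records are rehashed into the new fragment NewN.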
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+del_frag(State) when record(State, hash_state) ->
+ P = State#hash_state.next_n_to_split - 1,
+ L = State#hash_state.n_doubles,
+ N = State#hash_state.n_fragments,
+ if
+ P < 1 ->
+ L2 = L - 1,
+ MergeN = trunc(math:pow(2, L2)),
+ State2 = State#hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN,
+ n_doubles = L2},
+ {State2, [N], [MergeN]};
+ true ->
+ MergeN = P,
+ State2 = State#hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN},
+ {State2, [N], [MergeN]}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+key_to_frag_number(State, Key) when record(State, hash_state) ->
+ L = State#hash_state.n_doubles,
+ A = erlang:phash(Key, trunc(math:pow(2, L))),
+ P = State#hash_state.next_n_to_split,
+ if
+ A < P ->
+ erlang:phash(Key, trunc(math:pow(2, L + 1)));
+ true ->
+ A
+ end.
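+
+%% In other words: erlang:phash(Key, 2^L) addresses the fragments as they
+%% were before the current round of splitting; if that bucket (A) has
+%% already been split in this round (A < next_n_to_split), the key is
+%% rehashed over 2^(L+1) buckets instead.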
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+match_spec_to_frag_numbers(State, MatchSpec) when record(State, hash_state) ->
+ case MatchSpec of
+ [{HeadPat, _, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ KeyPat = element(2, HeadPat),
+ case has_var(KeyPat) of
+ false ->
+ [key_to_frag_number(State, KeyPat)];
+ true ->
+ lists:seq(1, State#hash_state.n_fragments)
+ end;
+ _ ->
+ lists:seq(1, State#hash_state.n_fragments)
+ end.
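+
+%% Example (hypothetical record): for a match spec whose head pattern has a
+%% fully bound key, such as [{{person, kim, '_'}, [], ['$_']}], only the
+%% fragment that 'kim' hashes to is returned; if the key position contains
+%% a match variable, all fragments 1..n_fragments are returned.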
+
+%impl_doc_include
+
+has_var(Pat) ->
+ mnesia:has_var(Pat).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl
new file mode 100644
index 0000000000..6560613302
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl
@@ -0,0 +1,127 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_frag_old_hash.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+%%%----------------------------------------------------------------------
+%%% Purpose : Implements hashing functionality for fragmented tables
+%%%----------------------------------------------------------------------
+
+-module(mnesia_frag_old_hash).
+-behaviour(mnesia_frag_hash).
+
+%% Hashing callback functions
+-export([
+ init_state/2,
+ add_frag/1,
+ del_frag/1,
+ key_to_frag_number/2,
+ match_spec_to_frag_numbers/2
+ ]).
+
+-record(old_hash_state,
+ {n_fragments,
+ next_n_to_split,
+ n_doubles}).
+
+%% Old style. Kept for backwards compatibility.
+-record(frag_hash,
+ {foreign_key,
+ n_fragments,
+ next_n_to_split,
+ n_doubles}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+init_state(_Tab, InitialState) when InitialState == undefined ->
+ #old_hash_state{n_fragments = 1,
+ next_n_to_split = 1,
+ n_doubles = 0};
+init_state(_Tab, FH) when record(FH, frag_hash) ->
+ %% Old style. Kept for backwards compatibility.
+ #old_hash_state{n_fragments = FH#frag_hash.n_fragments,
+ next_n_to_split = FH#frag_hash.next_n_to_split,
+ n_doubles = FH#frag_hash.n_doubles}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_frag(State) when record(State, old_hash_state) ->
+ SplitN = State#old_hash_state.next_n_to_split,
+ P = SplitN + 1,
+ L = State#old_hash_state.n_doubles,
+ NewN = State#old_hash_state.n_fragments + 1,
+ State2 = case trunc(math:pow(2, L)) + 1 of
+ P2 when P2 == P ->
+ State#old_hash_state{n_fragments = NewN,
+ next_n_to_split = 1,
+ n_doubles = L + 1};
+ _ ->
+ State#old_hash_state{n_fragments = NewN,
+ next_n_to_split = P}
+ end,
+ {State2, [SplitN], [NewN]}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+del_frag(State) when record(State, old_hash_state) ->
+ P = State#old_hash_state.next_n_to_split - 1,
+ L = State#old_hash_state.n_doubles,
+ N = State#old_hash_state.n_fragments,
+ if
+ P < 1 ->
+ L2 = L - 1,
+ MergeN = trunc(math:pow(2, L2)),
+ State2 = State#old_hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN,
+ n_doubles = L2},
+ {State2, [N], [MergeN]};
+ true ->
+ MergeN = P,
+ State2 = State#old_hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN},
+ {State2, [N], [MergeN]}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+key_to_frag_number(State, Key) when record(State, old_hash_state) ->
+ L = State#old_hash_state.n_doubles,
+ A = erlang:hash(Key, trunc(math:pow(2, L))),
+ P = State#old_hash_state.next_n_to_split,
+ if
+ A < P ->
+ erlang:hash(Key, trunc(math:pow(2, L + 1)));
+ true ->
+ A
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+match_spec_to_frag_numbers(State, MatchSpec) when record(State, old_hash_state) ->
+ case MatchSpec of
+ [{HeadPat, _, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ KeyPat = element(2, HeadPat),
+ case has_var(KeyPat) of
+ false ->
+ [key_to_frag_number(State, KeyPat)];
+ true ->
+ lists:seq(1, State#old_hash_state.n_fragments)
+ end;
+ _ ->
+ lists:seq(1, State#old_hash_state.n_fragments)
+ end.
+
+has_var(Pat) ->
+ mnesia:has_var(Pat).
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_index.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_index.erl
new file mode 100644
index 0000000000..3455a4808a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_index.erl
@@ -0,0 +1,380 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_index.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+%% Purpose: Handles index functionality in mnesia
+
+-module(mnesia_index).
+-export([read/5,
+ add_index/5,
+ delete_index/3,
+ del_object_index/5,
+ clear_index/4,
+ dirty_match_object/3,
+ dirty_select/3,
+ dirty_read/3,
+ dirty_read2/3,
+
+ db_put/2,
+ db_get/2,
+ db_match_erase/2,
+ get_index_table/2,
+ get_index_table/3,
+
+ tab2filename/2,
+ tab2tmp_filename/2,
+ init_index/2,
+ init_indecies/3,
+ del_transient/2,
+ del_transient/3,
+ del_index_table/3]).
+
+-import(mnesia_lib, [verbose/2]).
+-include("mnesia.hrl").
+
+-record(index, {setorbag, pos_list}).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+%% Read an object list through its index table.
+%% We assume that table Tab has an index on attribute number Pos.
+
+read(Tid, Store, Tab, IxKey, Pos) ->
+ ResList = mnesia_locker:ixrlock(Tid, Store, Tab, IxKey, Pos),
+    %% Remove all tuples that don't include IxKey; happens when Tab is a bag
+ case val({Tab, setorbag}) of
+ bag ->
+ mnesia_lib:key_search_all(IxKey, Pos, ResList);
+ _ ->
+ ResList
+ end.
+
+add_index(Index, Tab, Key, Obj, Old) ->
+ add_index2(Index#index.pos_list, Index#index.setorbag, Tab, Key, Obj, Old).
+
+add_index2([{Pos, Ixt} |Tail], bag, Tab, K, Obj, OldRecs) ->
+ db_put(Ixt, {element(Pos, Obj), K}),
+ add_index2(Tail, bag, Tab, K, Obj, OldRecs);
+add_index2([{Pos, Ixt} |Tail], Type, Tab, K, Obj, OldRecs) ->
+ %% Remove old tuples in index if Tab is updated
+ case OldRecs of
+ undefined ->
+ Old = mnesia_lib:db_get(Tab, K),
+ del_ixes(Ixt, Old, Pos, K);
+ Old ->
+ del_ixes(Ixt, Old, Pos, K)
+ end,
+ db_put(Ixt, {element(Pos, Obj), K}),
+ add_index2(Tail, Type, Tab, K, Obj, OldRecs);
+add_index2([], _, _Tab, _K, _Obj, _) -> ok.
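+
+%% Note: each index table Ixt is a bag of {IndexValue, PrimaryKey} pairs,
+%% so e.g. a (hypothetical) record {person, kim, 25} indexed on position 3
+%% is represented by the index entry {25, kim}. read/5 and dirty_read2/3
+%% use these pairs to map an index value back to the primary keys.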
+
+delete_index(Index, Tab, K) ->
+ delete_index2(Index#index.pos_list, Tab, K).
+
+delete_index2([{Pos, Ixt} | Tail], Tab, K) ->
+ DelObjs = mnesia_lib:db_get(Tab, K),
+ del_ixes(Ixt, DelObjs, Pos, K),
+ delete_index2(Tail, Tab, K);
+delete_index2([], _Tab, _K) -> ok.
+
+
+del_ixes(_Ixt, [], _Pos, _L) -> ok;
+del_ixes(Ixt, [Obj | Tail], Pos, Key) ->
+ db_match_erase(Ixt, {element(Pos, Obj), Key}),
+ del_ixes(Ixt, Tail, Pos, Key).
+
+del_object_index(Index, Tab, K, Obj, Old) ->
+ del_object_index2(Index#index.pos_list, Index#index.setorbag, Tab, K, Obj, Old).
+
+del_object_index2([], _, _Tab, _K, _Obj, _Old) -> ok;
+del_object_index2([{Pos, Ixt} | Tail], SoB, Tab, K, Obj, Old) ->
+ case SoB of
+ bag ->
+ del_object_bag(Tab, K, Obj, Pos, Ixt, Old);
+	_ -> %% If set, remove the tuple in the index table
+ del_ixes(Ixt, [Obj], Pos, K)
+ end,
+ del_object_index2(Tail, SoB, Tab, K, Obj, Old).
+
+del_object_bag(Tab, Key, Obj, Pos, Ixt, undefined) ->
+ Old = mnesia_lib:db_get(Tab, Key),
+ del_object_bag(Tab, Key, Obj, Pos, Ixt, Old);
+%% If the table type is bag, we only need to remove the index entry
+%% if the table contains fewer than 2 objects with that key.
+del_object_bag(_Tab, Key, Obj, Pos, Ixt, Old) when length(Old) < 2 ->
+ del_ixes(Ixt, [Obj], Pos, Key);
+del_object_bag(_Tab, _Key, _Obj, _Pos, _Ixt, _Old) -> ok.
+
+clear_index(Index, Tab, K, Obj) ->
+ clear_index2(Index#index.pos_list, Tab, K, Obj).
+
+clear_index2([], _Tab, _K, _Obj) -> ok;
+clear_index2([{_Pos, Ixt} | Tail], Tab, K, Obj) ->
+ db_match_erase(Ixt, Obj),
+ clear_index2(Tail, Tab, K, Obj).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+dirty_match_object(Tab, Pat, Pos) ->
+ %% Assume that we are on the node where the replica is
+ case element(2, Pat) of
+ '_' ->
+ IxKey = element(Pos, Pat),
+ RealKeys = realkeys(Tab, Pos, IxKey),
+ merge(RealKeys, Tab, Pat, []);
+ _Else ->
+ mnesia_lib:db_match_object(Tab, Pat)
+ end.
+
+merge([{_IxKey, RealKey} | Tail], Tab, Pat, Ack) ->
+ %% Assume that we are on the node where the replica is
+ Pat2 = setelement(2, Pat, RealKey),
+ Recs = mnesia_lib:db_match_object(Tab, Pat2),
+ merge(Tail, Tab, Pat, Recs ++ Ack);
+merge([], _, _, Ack) ->
+ Ack.
+
+realkeys(Tab, Pos, IxKey) ->
+ Index = get_index_table(Tab, Pos),
+    db_get(Index, IxKey). % a list of the form [{IxKey, RealKey1}, ...]
+
+dirty_select(Tab, Spec, Pos) ->
+ %% Assume that we are on the node where the replica is
+ %% Returns the records without applying the match spec
+ %% The actual filtering is handled by the caller
+ IxKey = element(Pos, Spec),
+ RealKeys = realkeys(Tab, Pos, IxKey),
+ StorageType = val({Tab, storage_type}),
+ lists:append([mnesia_lib:db_get(StorageType, Tab, Key) || Key <- RealKeys]).
+
+dirty_read(Tab, IxKey, Pos) ->
+ ResList = mnesia:dirty_rpc(Tab, ?MODULE, dirty_read2,
+ [Tab, IxKey, Pos]),
+ case val({Tab, setorbag}) of
+ bag ->
+	    %% Remove all tuples that don't include IxKey
+ mnesia_lib:key_search_all(IxKey, Pos, ResList);
+ _ ->
+ ResList
+ end.
+
+dirty_read2(Tab, IxKey, Pos) ->
+ Ix = get_index_table(Tab, Pos),
+ Keys = db_match(Ix, {IxKey, '$1'}),
+ r_keys(Keys, Tab, []).
+
+r_keys([[H]|T],Tab,Ack) ->
+ V = mnesia_lib:db_get(Tab, H),
+ r_keys(T, Tab, V ++ Ack);
+r_keys([], _, Ack) ->
+ Ack.
+
+
+%%%%%%% Creation, Init and deletion routines for index tables
+%% We can have several indexes on the same table
+%% this can be a fairly costly operation if table is *very* large
+
+tab2filename(Tab, Pos) ->
+ mnesia_lib:dir(Tab) ++ "_" ++ integer_to_list(Pos) ++ ".DAT".
+
+tab2tmp_filename(Tab, Pos) ->
+ mnesia_lib:dir(Tab) ++ "_" ++ integer_to_list(Pos) ++ ".TMP".
+
+init_index(Tab, Storage) ->
+ PosList = val({Tab, index}),
+ init_indecies(Tab, Storage, PosList).
+
+init_indecies(Tab, Storage, PosList) ->
+ case Storage of
+ unknown ->
+ ignore;
+ disc_only_copies ->
+ init_disc_index(Tab, PosList);
+ ram_copies ->
+ make_ram_index(Tab, PosList);
+ disc_copies ->
+ make_ram_index(Tab, PosList)
+ end.
+
+%% works for both ram and disc indexes
+
+del_index_table(_, unknown, _) ->
+ ignore;
+del_index_table(Tab, Storage, Pos) ->
+ delete_transient_index(Tab, Pos, Storage),
+ mnesia_lib:del({Tab, index}, Pos).
+
+del_transient(Tab, Storage) ->
+ PosList = val({Tab, index}),
+ del_transient(Tab, PosList, Storage).
+
+del_transient(_, [], _) -> done;
+del_transient(Tab, [Pos | Tail], Storage) ->
+ delete_transient_index(Tab, Pos, Storage),
+ del_transient(Tab, Tail, Storage).
+
+delete_transient_index(Tab, Pos, disc_only_copies) ->
+ Tag = {Tab, index, Pos},
+ mnesia_monitor:unsafe_close_dets(Tag),
+ file:delete(tab2filename(Tab, Pos)),
+ del_index_info(Tab, Pos), %% Uses val(..)
+ mnesia_lib:unset({Tab, {index, Pos}});
+
+delete_transient_index(Tab, Pos, _Storage) ->
+ Ixt = val({Tab, {index, Pos}}),
+ ?ets_delete_table(Ixt),
+ del_index_info(Tab, Pos),
+ mnesia_lib:unset({Tab, {index, Pos}}).
+
+%%%%% misc functions for the index create/init/delete functions above
+
+%% assuming that the file exists.
+init_disc_index(_Tab, []) ->
+ done;
+init_disc_index(Tab, [Pos | Tail]) when integer(Pos) ->
+ Fn = tab2filename(Tab, Pos),
+ IxTag = {Tab, index, Pos},
+ file:delete(Fn),
+ Args = [{file, Fn}, {keypos, 1}, {type, bag}],
+ mnesia_monitor:open_dets(IxTag, Args),
+ Storage = disc_only_copies,
+ Key = mnesia_lib:db_first(Storage, Tab),
+ Recs = mnesia_lib:db_get(Storage, Tab, Key),
+ BinSize = size(term_to_binary(Recs)),
+ KeysPerChunk = (4000 div BinSize) + 1,
+ Init = {start, KeysPerChunk},
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+ ok = dets:init_table(IxTag, create_fun(Init, Tab, Pos)),
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:set({Tab, {index, Pos}}, IxTag),
+ add_index_info(Tab, val({Tab, setorbag}), {Pos, {dets, IxTag}}),
+ init_disc_index(Tab, Tail).
+
+create_fun(Cont, Tab, Pos) ->
+ fun(read) ->
+ Data =
+ case Cont of
+ {start, KeysPerChunk} ->
+ mnesia_lib:db_init_chunk(disc_only_copies, Tab, KeysPerChunk);
+ '$end_of_table' ->
+ '$end_of_table';
+ _Else ->
+ mnesia_lib:db_chunk(disc_only_copies, Cont)
+ end,
+ case Data of
+ '$end_of_table' ->
+ end_of_input;
+ {Recs, Next} ->
+ IdxElems = [{element(Pos, Obj), element(2, Obj)} || Obj <- Recs],
+ {IdxElems, create_fun(Next, Tab, Pos)}
+ end;
+ (close) ->
+ ok
+ end.
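+
+%% Note: init_disc_index/2 fills the dets index via dets:init_table/2 using
+%% the chunked reader built by create_fun/3; the chunk size is a rough
+%% estimate, (4000 div BinSize) + 1 keys, based on the binary size of the
+%% records stored under the table's first key.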
+
+make_ram_index(_, []) ->
+ done;
+make_ram_index(Tab, [Pos | Tail]) ->
+ add_ram_index(Tab, Pos),
+ make_ram_index(Tab, Tail).
+
+add_ram_index(Tab, Pos) when integer(Pos) ->
+ verbose("Creating index for ~w ~n", [Tab]),
+ Index = mnesia_monitor:mktab(mnesia_index, [bag, public]),
+ Insert = fun(Rec, _Acc) ->
+ true = ?ets_insert(Index, {element(Pos, Rec), element(2, Rec)})
+ end,
+ mnesia_lib:db_fixtable(ram_copies, Tab, true),
+ true = ets:foldl(Insert, true, Tab),
+ mnesia_lib:db_fixtable(ram_copies, Tab, false),
+ mnesia_lib:set({Tab, {index, Pos}}, Index),
+ add_index_info(Tab, val({Tab, setorbag}), {Pos, {ram, Index}});
+add_ram_index(_Tab, snmp) ->
+ ok.
+
+add_index_info(Tab, Type, IxElem) ->
+ Commit = val({Tab, commit_work}),
+ case lists:keysearch(index, 1, Commit) of
+ false ->
+ Index = #index{setorbag = Type,
+ pos_list = [IxElem]},
+	    %% Check later if mnesia_tm is sensitive to the order
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit([Index | Commit]));
+ {value, Old} ->
+ %% We could check for consistency here
+ Index = Old#index{pos_list = [IxElem | Old#index.pos_list]},
+ NewC = lists:keyreplace(index, 1, Commit, Index),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC))
+ end.
+
+del_index_info(Tab, Pos) ->
+ Commit = val({Tab, commit_work}),
+ case lists:keysearch(index, 1, Commit) of
+ false ->
+	    %% Something is wrong; ignore it
+ skip;
+ {value, Old} ->
+ case lists:keydelete(Pos, 1, Old#index.pos_list) of
+ [] ->
+ NewC = lists:keydelete(index, 1, Commit),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC));
+ New ->
+ Index = Old#index{pos_list = New},
+ NewC = lists:keyreplace(index, 1, Commit, Index),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC))
+ end
+ end.
+
+db_put({ram, Ixt}, V) ->
+ true = ?ets_insert(Ixt, V);
+db_put({dets, Ixt}, V) ->
+ ok = dets:insert(Ixt, V).
+
+db_get({ram, Ixt}, K) ->
+ ?ets_lookup(Ixt, K);
+db_get({dets, Ixt}, K) ->
+ dets:lookup(Ixt, K).
+
+db_match_erase({ram, Ixt}, Pat) ->
+ true = ?ets_match_delete(Ixt, Pat);
+db_match_erase({dets, Ixt}, Pat) ->
+ ok = dets:match_delete(Ixt, Pat).
+
+db_match({ram, Ixt}, Pat) ->
+ ?ets_match(Ixt, Pat);
+db_match({dets, Ixt}, Pat) ->
+ dets:match(Ixt, Pat).
+
+get_index_table(Tab, Pos) ->
+ get_index_table(Tab, val({Tab, storage_type}), Pos).
+
+get_index_table(Tab, ram_copies, Pos) ->
+ {ram, val({Tab, {index, Pos}})};
+get_index_table(Tab, disc_copies, Pos) ->
+ {ram, val({Tab, {index, Pos}})};
+get_index_table(Tab, disc_only_copies, Pos) ->
+ {dets, val({Tab, {index, Pos}})};
+get_index_table(_Tab, unknown, _Pos) ->
+ unknown.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_kernel_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_kernel_sup.erl
new file mode 100644
index 0000000000..899d434fdd
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_kernel_sup.erl
@@ -0,0 +1,62 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_kernel_sup.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_kernel_sup).
+
+-behaviour(supervisor).
+
+-export([start/0, init/1, supervisor_timeout/1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% top supervisor callback functions
+
+start() ->
+ supervisor:start_link({local, mnesia_kernel_sup}, ?MODULE, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sub supervisor callback functions
+
+init([]) ->
+ ProcLib = [mnesia_monitor, proc_lib],
+ Flags = {one_for_all, 0, timer:hours(24)}, % Trust the top supervisor
+ Workers = [worker_spec(mnesia_monitor, timer:seconds(3), [gen_server]),
+ worker_spec(mnesia_subscr, timer:seconds(3), [gen_server]),
+ worker_spec(mnesia_locker, timer:seconds(3), ProcLib),
+ worker_spec(mnesia_recover, timer:minutes(3), [gen_server]),
+ worker_spec(mnesia_tm, timer:seconds(30), ProcLib),
+ supervisor_spec(mnesia_checkpoint_sup),
+ supervisor_spec(mnesia_snmp_sup),
+ worker_spec(mnesia_controller, timer:seconds(3), [gen_server]),
+ worker_spec(mnesia_late_loader, timer:seconds(3), ProcLib)
+ ],
+ {ok, {Flags, Workers}}.
+
+worker_spec(Name, KillAfter, Modules) ->
+ KA = supervisor_timeout(KillAfter),
+ {Name, {Name, start, []}, permanent, KA, worker, [Name] ++ Modules}.
+
+supervisor_spec(Name) ->
+ {Name, {Name, start, []}, permanent, infinity, supervisor,
+ [Name, supervisor]}.
+
+-ifdef(debug_shutdown).
+supervisor_timeout(_KillAfter) -> timer:hours(24).
+-else.
+supervisor_timeout(KillAfter) -> KillAfter.
+-endif.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_late_loader.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_late_loader.erl
new file mode 100644
index 0000000000..96d00f6e81
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_late_loader.erl
@@ -0,0 +1,95 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_late_loader.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_late_loader).
+
+-export([
+ async_late_disc_load/3,
+ maybe_async_late_disc_load/3,
+ init/1,
+ start/0
+ ]).
+
+%% sys callback functions
+-export([
+ system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+-define(SERVER_NAME, ?MODULE).
+
+-record(state, {supervisor}).
+
+async_late_disc_load(Node, Tabs, Reason) ->
+ Msg = {async_late_disc_load, Tabs, Reason},
+ catch ({?SERVER_NAME, Node} ! {self(), Msg}).
+
+maybe_async_late_disc_load(Node, Tabs, Reason) ->
+ Msg = {maybe_async_late_disc_load, Tabs, Reason},
+ catch ({?SERVER_NAME, Node} ! {self(), Msg}).
+
+start() ->
+ mnesia_monitor:start_proc(?SERVER_NAME, ?MODULE, init, [self()]).
+
+init(Parent) ->
+ %% Trap exit omitted intentionally
+ register(?SERVER_NAME, self()),
+ link(whereis(mnesia_controller)), %% We may not hang
+ mnesia_controller:merge_schema(),
+ unlink(whereis(mnesia_controller)),
+ mnesia_lib:set(mnesia_status, running),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ loop(#state{supervisor = Parent}).
+
+loop(State) ->
+ receive
+ {_From, {async_late_disc_load, Tabs, Reason}} ->
+ mnesia_controller:schedule_late_disc_load(Tabs, Reason),
+ loop(State);
+
+ {_From, {maybe_async_late_disc_load, Tabs, Reason}} ->
+ GoodTabs =
+ [T || T <- Tabs,
+ lists:member(node(),
+ mnesia_recover:get_master_nodes(T))],
+ mnesia_controller:schedule_late_disc_load(GoodTabs, Reason),
+ loop(State);
+
+ {system, From, Msg} ->
+ mnesia_lib:dbg_out("~p got {system, ~p, ~p}~n",
+ [?SERVER_NAME, From, Msg]),
+ Parent = State#state.supervisor,
+ sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
+
+ Msg ->
+ mnesia_lib:error("~p got unexpected message: ~p~n",
+ [?SERVER_NAME, Msg]),
+ loop(State)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, State) ->
+ loop(State).
+
+system_terminate(Reason, _Parent, _Debug, _State) ->
+ exit(Reason).
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_lib.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_lib.erl
new file mode 100644
index 0000000000..2c9e4d4fcf
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_lib.erl
@@ -0,0 +1,1278 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_lib.erl,v 1.3 2009/07/01 15:45:40 kostis Exp $
+%%
+%% This module contains all sorts of various things that don't fit
+%% anywhere else. Basically everything is exported.
+
+-module(mnesia_lib).
+
+-include("mnesia.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-export([core_file/0]).
+
+-export([
+ active_tables/0,
+ add/2,
+ add_list/2,
+ all_nodes/0,
+%% catch_val/1,
+ cleanup_tmp_files/1,
+ copy_file/2,
+ copy_holders/1,
+ coredump/0,
+ coredump/1,
+ create_counter/1,
+ cs_to_nodes/1,
+ cs_to_storage_type/2,
+ dets_to_ets/6,
+ db_chunk/2,
+ db_init_chunk/1,
+ db_init_chunk/2,
+ db_init_chunk/3,
+ db_erase/2,
+ db_erase/3,
+ db_erase_tab/1,
+ db_erase_tab/2,
+ db_first/1,
+ db_first/2,
+ db_last/1,
+ db_last/2,
+ db_fixtable/3,
+ db_get/2,
+ db_get/3,
+ db_match_erase/2,
+ db_match_erase/3,
+ db_match_object/2,
+ db_match_object/3,
+ db_next_key/2,
+ db_next_key/3,
+ db_prev_key/2,
+ db_prev_key/3,
+ db_put/2,
+ db_put/3,
+ db_select/2,
+ db_select/3,
+ db_slot/2,
+ db_slot/3,
+ db_update_counter/3,
+ db_update_counter/4,
+ dbg_out/2,
+ del/2,
+ dets_sync_close/1,
+ dets_sync_open/2,
+ dets_sync_open/3,
+ dir/0,
+ dir/1,
+ dir_info/0,
+ dirty_rpc_error_tag/1,
+ dist_coredump/0,
+ disk_type/1,
+ disk_type/2,
+ elems/2,
+ ensure_loaded/1,
+ error/2,
+ error_desc/1,
+ etype/1,
+ exists/1,
+ fatal/2,
+ get_node_number/0,
+ fix_error/1,
+ important/2,
+ incr_counter/1,
+ incr_counter/2,
+ intersect/2,
+ is_running/0,
+ is_running/1,
+ is_running_remote/0,
+ is_string/1,
+ key_search_delete/3,
+ key_search_all/3,
+ last_error/0,
+ local_active_tables/0,
+ lock_table/1,
+ mkcore/1,
+ not_active_here/1,
+ other_val/2,
+ pad_name/3,
+ random_time/2,
+ read_counter/1,
+ readable_indecies/1,
+ remote_copy_holders/1,
+ report_fatal/2,
+ report_system_event/1,
+ running_nodes/0,
+ running_nodes/1,
+ schema_cs_to_storage_type/2,
+ search_delete/2,
+ set/2,
+ set_counter/2,
+ set_local_content_whereabouts/1,
+ set_remote_where_to_read/1,
+ set_remote_where_to_read/2,
+ show/1,
+ show/2,
+ sort_commit/1,
+ storage_type_at_node/2,
+ swap_tmp_files/1,
+ tab2dat/1,
+ tab2dmp/1,
+ tab2tmp/1,
+ tab2dcd/1,
+ tab2dcl/1,
+ to_list/1,
+ union/2,
+ uniq/1,
+ unlock_table/1,
+ unset/1,
+ update_counter/2,
+ val/1,
+ vcore/0,
+ vcore/1,
+ verbose/2,
+ view/0,
+ view/1,
+ view/2,
+ warning/2,
+
+ is_debug_compiled/0,
+ activate_debug_fun/5,
+ deactivate_debug_fun/3,
+ eval_debug_fun/4,
+ scratch_debug_fun/0
+ ]).
+
+
+search_delete(Obj, List) ->
+ search_delete(Obj, List, [], none).
+search_delete(Obj, [Obj|Tail], Ack, _Res) ->
+ search_delete(Obj, Tail, Ack, Obj);
+search_delete(Obj, [H|T], Ack, Res) ->
+ search_delete(Obj, T, [H|Ack], Res);
+search_delete(_, [], Ack, Res) ->
+ {Res, Ack}.
+
+key_search_delete(Key, Pos, TupleList) ->
+ key_search_delete(Key, Pos, TupleList, none, []).
+key_search_delete(Key, Pos, [H|T], _Obj, Ack) when element(Pos, H) == Key ->
+ key_search_delete(Key, Pos, T, H, Ack);
+key_search_delete(Key, Pos, [H|T], Obj, Ack) ->
+ key_search_delete(Key, Pos, T, Obj, [H|Ack]);
+key_search_delete(_, _, [], Obj, Ack) ->
+ {Obj, Ack}.
+
+key_search_all(Key, Pos, TupleList) ->
+ key_search_all(Key, Pos, TupleList, []).
+key_search_all(Key, N, [H|T], Ack) when element(N, H) == Key ->
+ key_search_all(Key, N, T, [H|Ack]);
+key_search_all(Key, N, [_|T], Ack) ->
+ key_search_all(Key, N, T, Ack);
+key_search_all(_, _, [], Ack) -> Ack.
+
+intersect(L1, L2) ->
+ L2 -- (L2 -- L1).
+
+elems(I, [H|T]) ->
+ [element(I, H) | elems(I, T)];
+elems(_, []) ->
+ [].
+
+%% sort_commit sees to it that checkpoint info is always first in the
+%% commit_work structure; the other info does not need to be sorted.
+sort_commit(List) ->
+ sort_commit2(List, []).
+
+sort_commit2([{checkpoints, ChkpL}| Rest], Acc) ->
+ [{checkpoints, ChkpL}| Rest] ++ Acc;
+sort_commit2([H | R], Acc) ->
+ sort_commit2(R, [H | Acc]);
+sort_commit2([], Acc) -> Acc.
+
+is_string([H|T]) ->
+ if
+ 0 =< H, H < 256, integer(H) -> is_string(T);
+ true -> false
+ end;
+is_string([]) -> true.
+
+%%%
+
+union([H|L1], L2) ->
+ case lists:member(H, L2) of
+ true -> union(L1, L2);
+ false -> [H | union(L1, L2)]
+ end;
+union([], L2) -> L2.
+
+uniq([]) ->
+ [];
+uniq(List) ->
+ [H|T] = lists:sort(List),
+ uniq1(H, T, []).
+
+uniq1(H, [H|R], Ack) ->
+ uniq1(H, R, Ack);
+uniq1(Old, [H|R], Ack) ->
+ uniq1(H, R, [Old|Ack]);
+uniq1(Old, [], Ack) ->
+ [Old| Ack].
+
+to_list(X) when list(X) -> X;
+to_list(X) -> atom_to_list(X).
+
+all_nodes() ->
+ Ns = mnesia:system_info(db_nodes) ++
+ mnesia:system_info(extra_db_nodes),
+ mnesia_lib:uniq(Ns).
+
+running_nodes() ->
+ running_nodes(all_nodes()).
+
+running_nodes(Ns) ->
+ {Replies, _BadNs} = rpc:multicall(Ns, ?MODULE, is_running_remote, []),
+ [N || {GoodState, N} <- Replies, GoodState == true].
+
+is_running_remote() ->
+ IsRunning = is_running(),
+ {IsRunning == yes, node()}.
+
+is_running(Node) when atom(Node) ->
+ case rpc:call(Node, ?MODULE, is_running, []) of
+ {badrpc, _} -> no;
+ X -> X
+ end.
+
+is_running() ->
+ case ?catch_val(mnesia_status) of
+ {'EXIT', _} -> no;
+ running -> yes;
+ starting -> starting;
+ stopping -> stopping
+ end.
+
+show(X) ->
+ show(X, []).
+show(F, A) ->
+ io:format(user, F, A).
+
+
+pad_name([Char | Chars], Len, Tail) ->
+ [Char | pad_name(Chars, Len - 1, Tail)];
+pad_name([], Len, Tail) when Len =< 0 ->
+ Tail;
+pad_name([], Len, Tail) ->
+ [$ | pad_name([], Len - 1, Tail)].
+
+%% Some utility functions .....
+active_here(Tab) ->
+ case val({Tab, where_to_read}) of
+ Node when Node == node() -> true;
+ _ -> false
+ end.
+
+not_active_here(Tab) ->
+ not active_here(Tab).
+
+exists(Fname) ->
+ case file:open(Fname, [raw,read]) of
+ {ok, F} ->file:close(F), true;
+ _ -> false
+ end.
+
+dir() -> mnesia_monitor:get_env(dir).
+
+dir(Fname) ->
+ filename:join([dir(), to_list(Fname)]).
+
+tab2dat(Tab) -> %% DETS files
+ dir(lists:concat([Tab, ".DAT"])).
+
+tab2tmp(Tab) ->
+ dir(lists:concat([Tab, ".TMP"])).
+
+tab2dmp(Tab) -> %% Dumped ets tables
+ dir(lists:concat([Tab, ".DMP"])).
+
+tab2dcd(Tab) -> %% Disc copies data
+ dir(lists:concat([Tab, ".DCD"])).
+
+tab2dcl(Tab) -> %% Disc copies log
+ dir(lists:concat([Tab, ".DCL"])).
+
+storage_type_at_node(Node, Tab) ->
+ search_key(Node, [{disc_copies, val({Tab, disc_copies})},
+ {ram_copies, val({Tab, ram_copies})},
+ {disc_only_copies, val({Tab, disc_only_copies})}]).
+
+cs_to_storage_type(Node, Cs) ->
+ search_key(Node, [{disc_copies, Cs#cstruct.disc_copies},
+ {ram_copies, Cs#cstruct.ram_copies},
+ {disc_only_copies, Cs#cstruct.disc_only_copies}]).
+
+schema_cs_to_storage_type(Node, Cs) ->
+ case cs_to_storage_type(Node, Cs) of
+ unknown when Cs#cstruct.name == schema -> ram_copies;
+ Other -> Other
+ end.
+
+
+search_key(Key, [{Val, List} | Tail]) ->
+ case lists:member(Key, List) of
+ true -> Val;
+ false -> search_key(Key, Tail)
+ end;
+search_key(_Key, []) ->
+ unknown.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% oops, we've got some global variables here :-)
+
+%% They are
+%%
+%% {Tab, setorbag}, -> set | bag
+%% {Tab, storage_type} -> disc_copies | ram_copies | unknown (**)
+%% {Tab, disc_copies} -> node list (from schema)
+%% {Tab, ram_copies}, -> node list (from schema)
+%% {Tab, arity}, -> number
+%% {Tab, attributes}, -> atom list
+%% {Tab, wild_pattern}, -> record tuple with '_'s
+%% {Tab, {index, Pos}} -> ets table
+%% {Tab, index} -> integer list
+%% {Tab, cstruct} -> cstruct structure
+%%
+
+%% The following fields are dynamic according to
+%% the current node/table situation
+
+%% {Tab, where_to_write} -> node list
+%% {Tab, where_to_read} -> node | nowhere
+%%
+%% {schema, tables} -> tab list
+%% {schema, local_tables} -> tab list (**)
+%%
+%% {current, db_nodes} -> node list
+%%
+%% dir -> directory path (**)
+%% mnesia_status -> starting | running | stopping (**)
+%% (**) == (Different on all nodes)
+%%
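+%% A minimal usage sketch of these accessors (illustrative only; 'foo' is a
+%% hypothetical table name and the calls assume mnesia_gvar already exists):
+%%
+%%     set({foo, where_to_read}, node()),   %% store a per-table property
+%%     Node = val({foo, where_to_read}),    %% read it back
+%%     unset({foo, where_to_read}).         %% and remove it again
+%%
+%% Missing variables fall through to other_val/2 below, which returns a
+%% harmless default for a few well-known keys and raises for the rest.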
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+set(Var, Val) ->
+ ?ets_insert(mnesia_gvar, {Var, Val}).
+
+unset(Var) ->
+ ?ets_delete(mnesia_gvar, Var).
+
+other_val(Var, Other) ->
+ case Var of
+ {_, where_to_read} -> nowhere;
+ {_, where_to_write} -> [];
+ {_, active_replicas} -> [];
+ _ ->
+ pr_other(Var, Other)
+ end.
+
+pr_other(Var, Other) ->
+ Why =
+ case is_running() of
+ no -> {node_not_running, node()};
+ _ -> {no_exists, Var}
+ end,
+ verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~p ~n",
+ [self(), process_info(self(), registered_name),
+ Var, Other, Why]),
+ case Other of
+ {badarg, [{ets, lookup_element, _}|_]} ->
+ exit(Why);
+ _ ->
+ erlang:error(Why)
+ end.
+
+%% Some functions for list valued variables
+add(Var, Val) ->
+ L = val(Var),
+ set(Var, [Val | lists:delete(Val, L)]).
+
+add_list(Var, List) ->
+ L = val(Var),
+ set(Var, union(L, List)).
+
+del(Var, Val) ->
+ L = val(Var),
+ set(Var, lists:delete(Val, L)).
+
+%% This function is needed due to the fact
+%% that the application_controller enters
+%% a deadlock now and then. ac is implemented
+%% as a rather naive server.
+ensure_loaded(Appl) ->
+ case application_controller:get_loaded(Appl) of
+ {true, _} ->
+ ok;
+ false ->
+ case application:load(Appl) of
+ ok ->
+ ok;
+ {error, {already_loaded, Appl}} ->
+ ok;
+ {error, Reason} ->
+ {error, {application_load_error, Reason}}
+ end
+ end.
+
+local_active_tables() ->
+ Tabs = val({schema, local_tables}),
+ lists:zf(fun(Tab) -> active_here(Tab) end, Tabs).
+
+active_tables() ->
+ Tabs = val({schema, tables}),
+ F = fun(Tab) ->
+ case val({Tab, where_to_read}) of
+ nowhere -> false;
+ _ -> {true, Tab}
+ end
+ end,
+ lists:zf(F, Tabs).
+
+etype(X) when integer(X) -> integer;
+etype([]) -> nil;
+etype(X) when list(X) -> list;
+etype(X) when tuple(X) -> tuple;
+etype(X) when atom(X) -> atom;
+etype(_) -> othertype.
+
+remote_copy_holders(Cs) ->
+ copy_holders(Cs) -- [node()].
+
+copy_holders(Cs) when Cs#cstruct.local_content == false ->
+ cs_to_nodes(Cs);
+copy_holders(Cs) when Cs#cstruct.local_content == true ->
+ case lists:member(node(), cs_to_nodes(Cs)) of
+ true -> [node()];
+ false -> []
+ end.
+
+
+set_remote_where_to_read(Tab) ->
+ set_remote_where_to_read(Tab, []).
+
+set_remote_where_to_read(Tab, Ignore) ->
+ Active = val({Tab, active_replicas}),
+ Valid =
+ case mnesia_recover:get_master_nodes(Tab) of
+ [] -> Active;
+ Masters -> mnesia_lib:intersect(Masters, Active)
+ end,
+ Available = mnesia_lib:intersect(val({current, db_nodes}), Valid -- Ignore),
+ DiscOnlyC = val({Tab, disc_only_copies}),
+ Prefered = Available -- DiscOnlyC,
+ if
+ Prefered /= [] ->
+ set({Tab, where_to_read}, hd(Prefered));
+ Available /= [] ->
+ set({Tab, where_to_read}, hd(Available));
+ true ->
+ set({Tab, where_to_read}, nowhere)
+ end.
+
+%%% Local only
+set_local_content_whereabouts(Tab) ->
+ add({schema, local_tables}, Tab),
+ add({Tab, active_replicas}, node()),
+ set({Tab, where_to_write}, [node()]),
+ set({Tab, where_to_read}, node()).
+
+%%% counter routines
+
+create_counter(Name) ->
+ set_counter(Name, 0).
+
+set_counter(Name, Val) ->
+ ?ets_insert(mnesia_gvar, {Name, Val}).
+
+incr_counter(Name) ->
+ ?ets_update_counter(mnesia_gvar, Name, 1).
+
+incr_counter(Name, I) ->
+ ?ets_update_counter(mnesia_gvar, Name, I).
+
+update_counter(Name, Val) ->
+ ?ets_update_counter(mnesia_gvar, Name, Val).
+
+read_counter(Name) ->
+ ?ets_lookup_element(mnesia_gvar, Name, 2).
+
+cs_to_nodes(Cs) ->
+ Cs#cstruct.disc_only_copies ++
+ Cs#cstruct.disc_copies ++
+ Cs#cstruct.ram_copies.
+
+dist_coredump() ->
+ dist_coredump(all_nodes()).
+dist_coredump(Ns) ->
+ {Replies, _} = rpc:multicall(Ns, ?MODULE, coredump, []),
+ Replies.
+
+coredump() ->
+ coredump({crashinfo, {"user initiated~n", []}}).
+coredump(CrashInfo) ->
+ Core = mkcore(CrashInfo),
+ Out = core_file(),
+ important("Writing Mnesia core to file: ~p...~p~n", [Out, CrashInfo]),
+ file:write_file(Out, Core),
+ Out.
+
+core_file() ->
+ Integers = tuple_to_list(date()) ++ tuple_to_list(time()),
+ Fun = fun(I) when I < 10 -> ["_0", I];
+ (I) -> ["_", I]
+ end,
+ List = lists:append([Fun(I) || I <- Integers]),
+ filename:absname(lists:concat(["MnesiaCore.", node()] ++ List)).
+
+mkcore(CrashInfo) ->
+% dbg_out("Making a Mnesia core dump...~p~n", [CrashInfo]),
+ Nodes = [node() |nodes()],
+ TidLocks = (catch ets:tab2list(mnesia_tid_locks)),
+ Core = [
+ CrashInfo,
+ {time, {date(), time()}},
+ {self, catch process_info(self())},
+ {nodes, catch rpc:multicall(Nodes, ?MODULE, get_node_number, [])},
+ {applications, catch lists:sort(application:loaded_applications())},
+ {flags, catch init:get_arguments()},
+ {code_path, catch code:get_path()},
+ {code_loaded, catch lists:sort(code:all_loaded())},
+ {etsinfo, catch ets_info(ets:all())},
+
+ {version, catch mnesia:system_info(version)},
+ {schema, catch ets:tab2list(schema)},
+ {gvar, catch ets:tab2list(mnesia_gvar)},
+ {master_nodes, catch mnesia_recover:get_master_node_info()},
+
+ {processes, catch procs()},
+ {relatives, catch relatives()},
+ {workers, catch workers(mnesia_controller:get_workers(2000))},
+ {locking_procs, catch locking_procs(TidLocks)},
+
+ {held_locks, catch mnesia:system_info(held_locks)},
+ {tid_locks, TidLocks},
+ {lock_queue, catch mnesia:system_info(lock_queue)},
+ {load_info, catch mnesia_controller:get_info(2000)},
+ {trans_info, catch mnesia_tm:get_info(2000)},
+
+ {schema_file, catch file:read_file(tab2dat(schema))},
+ {dir_info, catch dir_info()},
+ {logfile, catch {ok, read_log_files()}}
+ ],
+ term_to_binary(Core).
+
+procs() ->
+ Fun = fun(P) -> {P, (catch lists:zf(fun proc_info/1, process_info(P)))} end,
+ lists:map(Fun, processes()).
+
+proc_info({registered_name, Val}) -> {true, Val};
+proc_info({message_queue_len, Val}) -> {true, Val};
+proc_info({status, Val}) -> {true, Val};
+proc_info({current_function, Val}) -> {true, Val};
+proc_info(_) -> false.
+
+get_node_number() ->
+ {node(), self()}.
+
+read_log_files() ->
+ [{F, catch file:read_file(F)} || F <- mnesia_log:log_files()].
+
+dir_info() ->
+ {ok, Cwd} = file:get_cwd(),
+ Dir = dir(),
+ [{cwd, Cwd, file:read_file_info(Cwd)},
+ {mnesia_dir, Dir, file:read_file_info(Dir)}] ++
+ case file:list_dir(Dir) of
+ {ok, Files} ->
+ [{mnesia_file, F, catch file:read_file_info(dir(F))} || F <- Files];
+ Other ->
+ [Other]
+ end.
+
+ets_info([H|T]) ->
+ [{table, H, ets:info(H)} | ets_info(T)];
+ets_info([]) -> [].
+
+relatives() ->
+ Info = fun(Name) ->
+ case whereis(Name) of
+ undefined -> false;
+ Pid -> {true, {Name, Pid, catch process_info(Pid)}}
+ end
+ end,
+ lists:zf(Info, mnesia:ms()).
+
+workers({workers, Loader, Sender, Dumper}) ->
+ Info = fun({Name, Pid}) ->
+ case Pid of
+ undefined -> false;
+ Pid -> {true, {Name, Pid, catch process_info(Pid)}}
+ end
+ end,
+ lists:zf(Info, [{loader, Loader}, {sender, Sender}, {dumper, Dumper}]).
+
+locking_procs(LockList) when list(LockList) ->
+ Tids = [element(1, Lock) || Lock <- LockList],
+ UT = uniq(Tids),
+ Info = fun(Tid) ->
+ Pid = Tid#tid.pid,
+ case node(Pid) == node() of
+ true ->
+ {true, {Pid, catch process_info(Pid)}};
+ _ ->
+ false
+ end
+ end,
+ lists:zf(Info, UT).
+
+view() ->
+ Bin = mkcore({crashinfo, {"view only~n", []}}),
+ vcore(Bin).
+
+%% Displays a Mnesia file on the tty. The file may be repaired.
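+%% Example calls (file names here are hypothetical): view("foo.DAT") shows a
+%% dets file, view("foo.LOG") a log file, and view("MnesiaCore.a@host") a
+%% core dump, as selected by the suffix/prefix checks below.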
+view(File) ->
+ case suffix([".DAT", ".RET", ".DMP", ".TMP"], File) of
+ true ->
+ view(File, dat);
+ false ->
+ case suffix([".LOG", ".BUP", ".ETS"], File) of
+ true ->
+ view(File, log);
+ false ->
+ case lists:prefix("MnesiaCore.", File) of
+ true ->
+ view(File, core);
+ false ->
+ {error, "Unknown file name"}
+ end
+ end
+ end.
+
+view(File, dat) ->
+ dets:view(File);
+view(File, log) ->
+ mnesia_log:view(File);
+view(File, core) ->
+ vcore(File).
+
+suffix(Suffixes, File) ->
+ Fun = fun(S) -> lists:suffix(S, File) end,
+ lists:any(Fun, Suffixes).
+
+%% View a core file
+
+vcore() ->
+ Prefix = lists:concat(["MnesiaCore.", node()]),
+ Filter = fun(F) -> lists:prefix(Prefix, F) end,
+ {ok, Cwd} = file:get_cwd(),
+ case file:list_dir(Cwd) of
+ {ok, Files}->
+ CoreFiles = lists:sort(lists:zf(Filter, Files)),
+ show("Mnesia core files: ~p~n", [CoreFiles]),
+ vcore(lists:last(CoreFiles));
+ Error ->
+ Error
+ end.
+
+vcore(Bin) when binary(Bin) ->
+ Core = binary_to_term(Bin),
+ Fun = fun({Item, Info}) ->
+ show("***** ~p *****~n", [Item]),
+ case catch vcore_elem({Item, Info}) of
+ {'EXIT', Reason} ->
+ show("{'EXIT', ~p}~n", [Reason]);
+ _ -> ok
+ end
+ end,
+ lists:foreach(Fun, Core);
+
+vcore(File) ->
+ show("~n***** Mnesia core: ~p *****~n", [File]),
+ case file:read_file(File) of
+ {ok, Bin} ->
+ vcore(Bin);
+ _ ->
+ nocore
+ end.
+
+vcore_elem({schema_file, {ok, B}}) ->
+ Fname = "/tmp/schema.DAT",
+ file:write_file(Fname, B),
+ dets:view(Fname),
+ file:delete(Fname);
+
+vcore_elem({logfile, {ok, BinList}}) ->
+ Fun = fun({F, Info}) ->
+ show("----- logfile: ~p -----~n", [F]),
+ case Info of
+ {ok, B} ->
+ Fname = "/tmp/mnesia_vcore_elem.TMP",
+ file:write_file(Fname, B),
+ mnesia_log:view(Fname),
+ file:delete(Fname);
+ _ ->
+ show("~p~n", [Info])
+ end
+ end,
+ lists:foreach(Fun, BinList);
+
+vcore_elem({crashinfo, {Format, Args}}) ->
+ show(Format, Args);
+vcore_elem({gvar, L}) ->
+ show("~p~n", [lists:sort(L)]);
+vcore_elem({transactions, Info}) ->
+ mnesia_tm:display_info(user, Info);
+
+vcore_elem({_Item, Info}) ->
+ show("~p~n", [Info]).
+
+fix_error(X) ->
+    set(last_error, X), %% for debuggability
+ case X of
+ {aborted, Reason} -> Reason;
+ {abort, Reason} -> Reason;
+ Y when atom(Y) -> Y;
+ {'EXIT', {_Reason, {Mod, _, _}}} when atom(Mod) ->
+ save(X),
+ case atom_to_list(Mod) of
+ [$m, $n, $e|_] -> badarg;
+ _ -> X
+ end;
+ _ -> X
+ end.
+
+last_error() ->
+ val(last_error).
+
+%% The following is a list of possible mnesia errors and what they
+%% actually mean
+
+error_desc(nested_transaction) -> "Nested transactions are not allowed";
+error_desc(badarg) -> "Bad or invalid argument, possibly bad type";
+error_desc(no_transaction) -> "Operation not allowed outside transactions";
+error_desc(combine_error) -> "Table options were illegally combined";
+error_desc(bad_index) -> "Index already exists or was out of bounds";
+error_desc(already_exists) -> "Some schema option we try to set is already on";
+error_desc(index_exists)-> "Some ops can not be performed on tabs with index";
+error_desc(no_exists)-> "Tried to perform op on non-existing (non alive) item";
+error_desc(system_limit) -> "Some system_limit was exhausted";
+error_desc(mnesia_down) -> "A transaction involving objects at some remote "
+                           "node which died while the transaction was executing "
+                           "*and* object(s) are no longer available elsewhere "
+                           "in the network";
+error_desc(not_a_db_node) -> "A node which is non-existent in "
+ "the schema was mentioned";
+error_desc(bad_type) -> "Bad type on some provided arguments";
+error_desc(node_not_running) -> "Node not running";
+error_desc(truncated_binary_file) -> "Truncated binary in file";
+error_desc(active) -> "Some delete ops require that "
+ "all active objects are removed";
+error_desc(illegal) -> "Operation not supported on object";
+error_desc({'EXIT', Reason}) ->
+ error_desc(Reason);
+error_desc({error, Reason}) ->
+ error_desc(Reason);
+error_desc({aborted, Reason}) ->
+ error_desc(Reason);
+error_desc(Reason) when tuple(Reason), size(Reason) > 0 ->
+ setelement(1, Reason, error_desc(element(1, Reason)));
+error_desc(Reason) ->
+ Reason.
+
+dirty_rpc_error_tag(Reason) ->
+ case Reason of
+ {'EXIT', _} -> badarg;
+ no_variable -> badarg;
+ _ -> no_exists
+ end.
+
+fatal(Format, Args) ->
+ catch set(mnesia_status, stopping),
+ Core = mkcore({crashinfo, {Format, Args}}),
+ report_fatal(Format, Args, Core),
+ timer:sleep(10000), % Enough to write the core dump to disc?
+ mnesia:lkill(),
+ exit(fatal).
+
+report_fatal(Format, Args) ->
+ report_fatal(Format, Args, nocore).
+
+report_fatal(Format, Args, Core) ->
+ report_system_event({mnesia_fatal, Format, Args, Core}),
+ catch exit(whereis(mnesia_monitor), fatal).
+
+%% We sleep longer and longer the more times we retry.
+%% Some testing led to the following constants.
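+%% As a worked example (not from the original source): with Retries = 3 we get
+%% Dup = 9 and MaxIntv = trunc(500 * (1 - 50/59)) = 76, so the returned time
+%% lies between 10 and 85 milliseconds.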
+random_time(Retries, _Counter0) ->
+% UpperLimit = 2000,
+% MaxIntv = trunc(UpperLimit * (1-(4/((Retries*Retries)+4)))),
+ UpperLimit = 500,
+ Dup = Retries * Retries,
+ MaxIntv = trunc(UpperLimit * (1-(50/((Dup)+50)))),
+
+ case get(random_seed) of
+ undefined ->
+ {X, Y, Z} = erlang:now(), %% time()
+ random:seed(X, Y, Z),
+ Time = Dup + random:uniform(MaxIntv),
+ %% dbg_out("---random_test rs ~w max ~w val ~w---~n", [Retries, MaxIntv, Time]),
+ Time;
+ _ ->
+ Time = Dup + random:uniform(MaxIntv),
+ %% dbg_out("---random_test rs ~w max ~w val ~w---~n", [Retries, MaxIntv, Time]),
+ Time
+ end.
+
+report_system_event(Event0) ->
+ Event = {mnesia_system_event, Event0},
+ report_system_event(catch_notify(Event), Event),
+ case ?catch_val(subscribers) of
+ {'EXIT', _} -> ignore;
+ Pids -> lists:foreach(fun(Pid) -> Pid ! Event end, Pids)
+ end,
+ ok.
+
+catch_notify(Event) ->
+ case whereis(mnesia_event) of
+ undefined ->
+ {'EXIT', {badarg, {mnesia_event, Event}}};
+ Pid ->
+ gen_event:notify(Pid, Event)
+ end.
+
+report_system_event({'EXIT', Reason}, Event) ->
+ Mod = mnesia_monitor:get_env(event_module),
+ case mnesia_sup:start_event() of
+ {ok, Pid} ->
+ link(Pid),
+ gen_event:call(mnesia_event, Mod, Event, infinity),
+ unlink(Pid),
+
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ gen_event:stop(mnesia_event),
+ ok
+ end;
+
+ Error ->
+ Msg = "Mnesia(~p): Cannot report event ~p: ~p (~p)~n",
+ error_logger:format(Msg, [node(), Event, Reason, Error])
+ end;
+report_system_event(_Res, _Event) ->
+ ignore.
+
+%% important messages are reported regardless of debug level
+important(Format, Args) ->
+ save({Format, Args}),
+ report_system_event({mnesia_info, Format, Args}).
+
+%% Warning messages are reported regardless of debug level
+warning(Format, Args) ->
+ save({Format, Args}),
+ report_system_event({mnesia_warning, Format, Args}).
+
+%% error messages are reported regardless of debug level
+error(Format, Args) ->
+ save({Format, Args}),
+ report_system_event({mnesia_error, Format, Args}).
+
+%% verbose messages are reported if debug level == verbose, debug or trace
+verbose(Format, Args) ->
+ case mnesia_monitor:get_env(debug) of
+ none -> save({Format, Args});
+ verbose -> important(Format, Args);
+ debug -> important(Format, Args);
+ trace -> important(Format, Args)
+ end.
+
+%% debug messages are displayed if debug level == debug or trace
+dbg_out(Format, Args) ->
+ case mnesia_monitor:get_env(debug) of
+ none -> ignore;
+ verbose -> save({Format, Args});
+ _ -> report_system_event({mnesia_info, Format, Args})
+ end.
+
+%% Keep the last 30 or so debug printouts
+save(DbgInfo) ->
+ catch save2(DbgInfo).
+
+save2(DbgInfo) ->
+ Key = {'$$$_report', current_pos},
+ P =
+ case ?ets_lookup_element(mnesia_gvar, Key, 2) of
+ 30 -> -1;
+ I -> I
+ end,
+ set({'$$$_report', current_pos}, P+1),
+ set({'$$$_report', P+1}, {date(), time(), DbgInfo}).
+
+copy_file(From, To) ->
+ case file:open(From, [raw, binary, read]) of
+ {ok, F} ->
+ case file:open(To, [raw, binary, write]) of
+ {ok, T} ->
+ Res = copy_file_loop(F, T, 8000),
+ file:close(F),
+ file:close(T),
+ Res;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+copy_file_loop(F, T, ChunkSize) ->
+ case file:read(F, ChunkSize) of
+ {ok, {0, _}} ->
+ ok;
+ {ok, {_, Bin}} ->
+ file:write(T, Bin),
+ copy_file_loop(F, T, ChunkSize);
+ {ok, Bin} ->
+ file:write(T, Bin),
+ copy_file_loop(F, T, ChunkSize);
+ eof ->
+ ok;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+
+%%%%%%%%%%%%
+%% Versions of all the low-level db funcs that determine whether we
+%% shall go to disc or ram to do the actual operation.
+
+db_get(Tab, Key) ->
+ db_get(val({Tab, storage_type}), Tab, Key).
+db_get(ram_copies, Tab, Key) -> ?ets_lookup(Tab, Key);
+db_get(disc_copies, Tab, Key) -> ?ets_lookup(Tab, Key);
+db_get(disc_only_copies, Tab, Key) -> dets:lookup(Tab, Key).
+
+db_init_chunk(Tab) ->
+ db_init_chunk(val({Tab, storage_type}), Tab, 1000).
+db_init_chunk(Tab, N) ->
+ db_init_chunk(val({Tab, storage_type}), Tab, N).
+
+db_init_chunk(disc_only_copies, Tab, N) ->
+ dets:select(Tab, [{'_', [], ['$_']}], N);
+db_init_chunk(_, Tab, N) ->
+ ets:select(Tab, [{'_', [], ['$_']}], N).
+
+db_chunk(disc_only_copies, State) ->
+ dets:select(State);
+db_chunk(_, State) ->
+ ets:select(State).
+
+db_put(Tab, Val) ->
+ db_put(val({Tab, storage_type}), Tab, Val).
+
+db_put(ram_copies, Tab, Val) -> ?ets_insert(Tab, Val), ok;
+db_put(disc_copies, Tab, Val) -> ?ets_insert(Tab, Val), ok;
+db_put(disc_only_copies, Tab, Val) -> dets:insert(Tab, Val).
+
+db_match_object(Tab, Pat) ->
+ db_match_object(val({Tab, storage_type}), Tab, Pat).
+db_match_object(Storage, Tab, Pat) ->
+ db_fixtable(Storage, Tab, true),
+ Res = catch_match_object(Storage, Tab, Pat),
+ db_fixtable(Storage, Tab, false),
+ case Res of
+ {'EXIT', Reason} -> exit(Reason);
+ _ -> Res
+ end.
+
+catch_match_object(disc_only_copies, Tab, Pat) ->
+ catch dets:match_object(Tab, Pat);
+catch_match_object(_, Tab, Pat) ->
+ catch ets:match_object(Tab, Pat).
+
+db_select(Tab, Pat) ->
+ db_select(val({Tab, storage_type}), Tab, Pat).
+
+db_select(Storage, Tab, Pat) ->
+ db_fixtable(Storage, Tab, true),
+ Res = catch_select(Storage, Tab, Pat),
+ db_fixtable(Storage, Tab, false),
+ case Res of
+ {'EXIT', Reason} -> exit(Reason);
+ _ -> Res
+ end.
+
+catch_select(disc_only_copies, Tab, Pat) ->
+ dets:select(Tab, Pat);
+catch_select(_, Tab, Pat) ->
+ ets:select(Tab, Pat).
+
+db_fixtable(ets, Tab, Bool) ->
+ ets:safe_fixtable(Tab, Bool);
+db_fixtable(ram_copies, Tab, Bool) ->
+ ets:safe_fixtable(Tab, Bool);
+db_fixtable(disc_copies, Tab, Bool) ->
+ ets:safe_fixtable(Tab, Bool);
+db_fixtable(dets, Tab, Bool) ->
+ dets:safe_fixtable(Tab, Bool);
+db_fixtable(disc_only_copies, Tab, Bool) ->
+ dets:safe_fixtable(Tab, Bool).
+
+db_erase(Tab, Key) ->
+ db_erase(val({Tab, storage_type}), Tab, Key).
+db_erase(ram_copies, Tab, Key) -> ?ets_delete(Tab, Key), ok;
+db_erase(disc_copies, Tab, Key) -> ?ets_delete(Tab, Key), ok;
+db_erase(disc_only_copies, Tab, Key) -> dets:delete(Tab, Key).
+
+db_match_erase(Tab, Pat) ->
+ db_match_erase(val({Tab, storage_type}), Tab, Pat).
+db_match_erase(ram_copies, Tab, Pat) -> ?ets_match_delete(Tab, Pat), ok;
+db_match_erase(disc_copies, Tab, Pat) -> ?ets_match_delete(Tab, Pat), ok;
+db_match_erase(disc_only_copies, Tab, Pat) -> dets:match_delete(Tab, Pat).
+
+db_first(Tab) ->
+ db_first(val({Tab, storage_type}), Tab).
+db_first(ram_copies, Tab) -> ?ets_first(Tab);
+db_first(disc_copies, Tab) -> ?ets_first(Tab);
+db_first(disc_only_copies, Tab) -> dets:first(Tab).
+
+db_next_key(Tab, Key) ->
+ db_next_key(val({Tab, storage_type}), Tab, Key).
+db_next_key(ram_copies, Tab, Key) -> ?ets_next(Tab, Key);
+db_next_key(disc_copies, Tab, Key) -> ?ets_next(Tab, Key);
+db_next_key(disc_only_copies, Tab, Key) -> dets:next(Tab, Key).
+
+db_last(Tab) ->
+ db_last(val({Tab, storage_type}), Tab).
+db_last(ram_copies, Tab) -> ?ets_last(Tab);
+db_last(disc_copies, Tab) -> ?ets_last(Tab);
+db_last(disc_only_copies, Tab) -> dets:first(Tab). %% Dets has no ordering
+
+db_prev_key(Tab, Key) ->
+ db_prev_key(val({Tab, storage_type}), Tab, Key).
+db_prev_key(ram_copies, Tab, Key) -> ?ets_prev(Tab, Key);
+db_prev_key(disc_copies, Tab, Key) -> ?ets_prev(Tab, Key);
+db_prev_key(disc_only_copies, Tab, Key) -> dets:next(Tab, Key). %% Dets has no ordering
+
+db_slot(Tab, Pos) ->
+ db_slot(val({Tab, storage_type}), Tab, Pos).
+db_slot(ram_copies, Tab, Pos) -> ?ets_slot(Tab, Pos);
+db_slot(disc_copies, Tab, Pos) -> ?ets_slot(Tab, Pos);
+db_slot(disc_only_copies, Tab, Pos) -> dets:slot(Tab, Pos).
+
+db_update_counter(Tab, C, Val) ->
+ db_update_counter(val({Tab, storage_type}), Tab, C, Val).
+db_update_counter(ram_copies, Tab, C, Val) ->
+ ?ets_update_counter(Tab, C, Val);
+db_update_counter(disc_copies, Tab, C, Val) ->
+ ?ets_update_counter(Tab, C, Val);
+db_update_counter(disc_only_copies, Tab, C, Val) ->
+ dets:update_counter(Tab, C, Val).
+
+db_erase_tab(Tab) ->
+ db_erase_tab(val({Tab, storage_type}), Tab).
+db_erase_tab(ram_copies, Tab) -> ?ets_delete_table(Tab);
+db_erase_tab(disc_copies, Tab) -> ?ets_delete_table(Tab);
+db_erase_tab(disc_only_copies, _Tab) -> ignore.
+
+%% assuming that Tab is a valid ets-table
+dets_to_ets(Tabname, Tab, File, Type, Rep, Lock) ->
+ {Open, Close} = mkfuns(Lock),
+ case Open(Tabname, [{file, File}, {type, disk_type(Tab, Type)},
+ {keypos, 2}, {repair, Rep}]) of
+ {ok, Tabname} ->
+ Res = dets:to_ets(Tabname, Tab),
+ Close(Tabname),
+ trav_ret(Res, Tab);
+ Other ->
+ Other
+ end.
+
+trav_ret(Tabname, Tabname) -> loaded;
+trav_ret(Other, _Tabname) -> Other.
+
+mkfuns(yes) ->
+ {fun(Tab, Args) -> dets_sync_open(Tab, Args) end,
+ fun(Tab) -> dets_sync_close(Tab) end};
+mkfuns(no) ->
+ {fun(Tab, Args) -> dets:open_file(Tab, Args) end,
+ fun(Tab) -> dets:close(Tab) end}.
+
+disk_type(Tab) ->
+ disk_type(Tab, val({Tab, setorbag})).
+
+disk_type(_Tab, ordered_set) ->
+ set;
+disk_type(_, Type) ->
+ Type.
+
+dets_sync_open(Tab, Ref, File) ->
+ Args = [{file, File},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, disk_type(Tab)}],
+ dets_sync_open(Ref, Args).
+
+lock_table(Tab) ->
+ global:set_lock({{mnesia_table_lock, Tab}, self()}, [node()], infinity).
+% dbg_out("dets_sync_open: ~p ~p~n", [T, self()]),
+
+unlock_table(Tab) ->
+ global:del_lock({{mnesia_table_lock, Tab}, self()}, [node()]).
+% dbg_out("unlock_table: ~p ~p~n", [T, self()]),
+
+dets_sync_open(Tab, Args) ->
+ lock_table(Tab),
+ case dets:open_file(Tab, Args) of
+ {ok, Tab} ->
+ {ok, Tab};
+ Other ->
+ dets_sync_close(Tab),
+ Other
+ end.
+
+dets_sync_close(Tab) ->
+ catch dets:close(Tab),
+ unlock_table(Tab),
+ ok.
+
+cleanup_tmp_files([Tab | Tabs]) ->
+ dets_sync_close(Tab),
+ file:delete(tab2tmp(Tab)),
+ cleanup_tmp_files(Tabs);
+cleanup_tmp_files([]) ->
+ ok.
+
+%% Returns a list of bad tables
+swap_tmp_files([Tab | Tabs]) ->
+ dets_sync_close(Tab),
+ Tmp = tab2tmp(Tab),
+ Dat = tab2dat(Tab),
+ case file:rename(Tmp, Dat) of
+ ok ->
+ swap_tmp_files(Tabs);
+ _ ->
+ file:delete(Tmp),
+ [Tab | swap_tmp_files(Tabs)]
+ end;
+swap_tmp_files([]) ->
+ [].
+
+readable_indecies(Tab) ->
+ val({Tab, index}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Managing conditional debug functions
+%%
+%% The main idea with the debug_fun's is to allow test programs
+%% to control the internal behaviour of Mnesia. This is needed
+%% to make the test programs independent of system load, swapping
+%% and other circumstances that may affect the behaviour of Mnesia.
+%%
+%% First, calls to ?eval_debug_fun should be inserted at well-defined
+%% places in Mnesia's code, e.g. in critical situations during
+%% startup, transaction commit, backups etc.
+%%
+%% Then compile Mnesia with the compiler option 'debug'.
+%%
+%% In test programs ?activate_debug_fun should be called
+%% in order to bind a fun to the debug identifier stated
+%% in the call to ?eval_debug_fun.
+%%
+%% If eval_debug_fun finds that the fun is activated it
+%% invokes the fun as NewContext = Fun(PreviousContext, EvalContext)
+%% and replaces the PreviousContext with the NewContext.
+%% The initial context of a debug_fun is given as argument to
+%% activate_debug_fun.
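+%%
+%% A minimal sketch of the intended usage (the identifier my_id and the
+%% contexts are hypothetical; in real code the ?activate_debug_fun and
+%% ?eval_debug_fun macros are used, which expand to these calls only when
+%% Mnesia is compiled with 'debug'):
+%%
+%%     %% In a test program:
+%%     mnesia_lib:activate_debug_fun(my_id,
+%%                                   fun(PrevContext, _EvalContext) ->
+%%                                           PrevContext + 1
+%%                                   end,
+%%                                   0, ?FILE, ?LINE),
+%%
+%%     %% At the instrumented place inside Mnesia:
+%%     mnesia_lib:eval_debug_fun(my_id, [{step, commit}], ?FILE, ?LINE).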
+
+-define(DEBUG_TAB, mnesia_debug).
+-record(debug_info, {id, function, context, file, line}).
+
+scratch_debug_fun() ->
+ dbg_out("scratch_debug_fun(): ~p~n", [?DEBUG_TAB]),
+ (catch ?ets_delete_table(?DEBUG_TAB)),
+ ?ets_new_table(?DEBUG_TAB, [set, public, named_table, {keypos, 2}]).
+
+activate_debug_fun(FunId, Fun, InitialContext, File, Line) ->
+ Info = #debug_info{id = FunId,
+ function = Fun,
+ context = InitialContext,
+ file = File,
+ line = Line
+ },
+ update_debug_info(Info).
+
+update_debug_info(Info) ->
+ case catch ?ets_insert(?DEBUG_TAB, Info) of
+ {'EXIT', _} ->
+ scratch_debug_fun(),
+ ?ets_insert(?DEBUG_TAB, Info);
+ _ ->
+ ok
+ end,
+ dbg_out("update_debug_info(~p)~n", [Info]),
+ ok.
+
+deactivate_debug_fun(FunId, _File, _Line) ->
+ catch ?ets_delete(?DEBUG_TAB, FunId),
+ ok.
+
+eval_debug_fun(FunId, EvalContext, EvalFile, EvalLine) ->
+ case catch ?ets_lookup(?DEBUG_TAB, FunId) of
+ [] ->
+ ok;
+ [Info] ->
+ OldContext = Info#debug_info.context,
+ dbg_out("~s(~p): ~w "
+ "activated in ~s(~p)~n "
+ "eval_debug_fun(~w, ~w)~n",
+ [filename:basename(EvalFile), EvalLine, Info#debug_info.id,
+ filename:basename(Info#debug_info.file), Info#debug_info.line,
+ OldContext, EvalContext]),
+ Fun = Info#debug_info.function,
+ NewContext = Fun(OldContext, EvalContext),
+
+ case catch ?ets_lookup(?DEBUG_TAB, FunId) of
+ [Info] when NewContext /= OldContext ->
+ NewInfo = Info#debug_info{context = NewContext},
+ update_debug_info(NewInfo);
+ _ ->
+ ok
+ end;
+ {'EXIT', _} -> ok
+ end.
+
+-ifdef(debug).
+ is_debug_compiled() -> true.
+-else.
+ is_debug_compiled() -> false.
+-endif.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_loader.erl
new file mode 100644
index 0000000000..df3309cfa6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_loader.erl
@@ -0,0 +1,805 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_loader.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+%%% Purpose : Loads tables from local disc or from remote node
+
+-module(mnesia_loader).
+
+%% Mnesia internal stuff
+-export([disc_load_table/2,
+ net_load_table/4,
+ send_table/3]).
+
+-export([old_node_init_table/6]). %% Spawned old node protocol conversion hack
+-export([spawned_receiver/8]). %% Spawned lock taking process
+
+-import(mnesia_lib, [set/2, fatal/2, verbose/2, dbg_out/2]).
+
+-include("mnesia.hrl").
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Load a table from local disc
+
+disc_load_table(Tab, Reason) ->
+ Storage = val({Tab, storage_type}),
+ Type = val({Tab, setorbag}),
+ dbg_out("Getting table ~p (~p) from disc: ~p~n",
+ [Tab, Storage, Reason]),
+ ?eval_debug_fun({?MODULE, do_get_disc_copy},
+ [{tab, Tab},
+ {reason, Reason},
+ {storage, Storage},
+ {type, Type}]),
+ do_get_disc_copy2(Tab, Reason, Storage, Type).
+
+do_get_disc_copy2(Tab, _Reason, Storage, _Type) when Storage == unknown ->
+ verbose("Local table copy of ~p has recently been deleted, ignored.~n",
+ [Tab]),
+ {loaded, ok}; %% ?
+do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
+ %% NOW we create the actual table
+ Repair = mnesia_monitor:get_env(auto_repair),
+ Args = [{keypos, 2}, public, named_table, Type],
+ case Reason of
+        {dumper, _} -> %% Resources already allocated
+ ignore;
+ _ ->
+ mnesia_monitor:mktab(Tab, Args),
+ Count = mnesia_log:dcd2ets(Tab, Repair),
+ case ets:info(Tab, size) of
+ X when X < Count * 4 ->
+ ok = mnesia_log:ets2dcd(Tab);
+ _ ->
+ ignore
+ end
+ end,
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+
+do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ case Reason of
+        {dumper, _} -> %% Resources already allocated
+ ignore;
+ _ ->
+ mnesia_monitor:mktab(Tab, Args),
+ Fname = mnesia_lib:tab2dcd(Tab),
+ Datname = mnesia_lib:tab2dat(Tab),
+ Repair = mnesia_monitor:get_env(auto_repair),
+ case mnesia_monitor:use_dir() of
+ true ->
+ case mnesia_lib:exists(Fname) of
+ true -> mnesia_log:dcd2ets(Tab, Repair);
+ false ->
+ case mnesia_lib:exists(Datname) of
+ true ->
+ mnesia_lib:dets_to_ets(Tab, Tab, Datname,
+ Type, Repair, no);
+ false ->
+ false
+ end
+ end;
+ false ->
+ false
+ end
+ end,
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+
+do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_only_copies ->
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ case Reason of
+ {dumper, _} ->
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+ _ ->
+ case mnesia_monitor:open_dets(Tab, Args) of
+ {ok, _} ->
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+ {error, Error} ->
+ {not_loaded, {"Failed to create dets table", Error}}
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Load a table from a remote node
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Receiver Sender
+%% -------- ------
+%% Grab schema lock on table
+%% Determine table size
+%% Create empty pre-grown table
+%% Grab read lock on table
+%% Let receiver subscribe on updates done on sender node
+%% Disable rehashing of table
+%% Release read lock on table
+%% Send table to receiver in chunks
+%%
+%% Grab read lock on table
+%% Block dirty updates
+%% Update whereabouts
+%%
+%% Cancel the update subscription
+%% Process the subscription events
+%% Optionally dump to disc
+%% Unblock dirty updates
+%% Release read lock on table
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
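+%%
+%% In the code below this handshake corresponds to the messages
+%% {SenderPid, {first, TabSize}} (or {first, TabSize, DetsData}),
+%% {SenderPid, {more, Recs}}, {SenderPid, {no_more, DatBin}} and
+%% {copier_done, Node}, exchanged between init_receiver/5 on the receiving
+%% node and send_table/3 on the sending node.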
+
+-define(MAX_TRANSFER_SIZE, 7500).
+-define(MAX_RAM_FILE_SIZE, 1000000).
+-define(MAX_RAM_TRANSFERS, (?MAX_RAM_FILE_SIZE div ?MAX_TRANSFER_SIZE) + 1).
+-define(MAX_NOPACKETS, 20).
+
+net_load_table(Tab, Reason, Ns, Cs)
+ when Reason == {dumper,add_table_copy} ->
+ try_net_load_table(Tab, Reason, Ns, Cs);
+net_load_table(Tab, Reason, Ns, _Cs) ->
+ try_net_load_table(Tab, Reason, Ns, val({Tab, cstruct})).
+
+try_net_load_table(Tab, _Reason, [], _Cs) ->
+ verbose("Copy failed. No active replicas of ~p are available.~n", [Tab]),
+ {not_loaded, none_active};
+try_net_load_table(Tab, Reason, Ns, Cs) ->
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ do_get_network_copy(Tab, Reason, Ns, Storage, Cs).
+
+do_get_network_copy(Tab, _Reason, _Ns, unknown, _Cs) ->
+ verbose("Local table copy of ~p has recently been deleted, ignored.~n", [Tab]),
+ {not_loaded, storage_unknown};
+do_get_network_copy(Tab, Reason, Ns, Storage, Cs) ->
+ [Node | Tail] = Ns,
+ dbg_out("Getting table ~p (~p) from node ~p: ~p~n",
+ [Tab, Storage, Node, Reason]),
+ ?eval_debug_fun({?MODULE, do_get_network_copy},
+ [{tab, Tab}, {reason, Reason},
+ {nodes, Ns}, {storage, Storage}]),
+ mnesia_controller:start_remote_sender(Node, Tab, self(), Storage),
+ put(mnesia_table_sender_node, {Tab, Node}),
+ case init_receiver(Node, Tab, Storage, Cs, Reason) of
+ ok ->
+ set({Tab, load_node}, Node),
+ set({Tab, load_reason}, Reason),
+ mnesia_controller:i_have_tab(Tab),
+ dbg_out("Table ~p copied from ~p to ~p~n", [Tab, Node, node()]),
+ {loaded, ok};
+ Err = {error, _} when element(1, Reason) == dumper ->
+ {not_loaded,Err};
+ restart ->
+ try_net_load_table(Tab, Reason, Tail, Cs);
+ down ->
+ try_net_load_table(Tab, Reason, Tail, Cs)
+ end.
+
+snmpify(Tab, Storage) ->
+ do_snmpify(Tab, val({Tab, snmp}), Storage).
+
+do_snmpify(_Tab, [], _Storage) ->
+ ignore;
+do_snmpify(Tab, Us, Storage) ->
+ Snmp = mnesia_snmp_hook:create_table(Us, Tab, Storage),
+ set({Tab, {index, snmp}}, Snmp).
+
+%% Start the receiver.
+%% The sender should be started first, so we don't hold the schema-read
+%% lock too long (or get stuck in a deadlock)
+init_receiver(Node, Tab, Storage, Cs, Reason) ->
+ receive
+ {SenderPid, {first, TabSize}} ->
+ spawn_receiver(Tab,Storage,Cs,SenderPid,
+ TabSize,false,Reason);
+ {SenderPid, {first, TabSize, DetsData}} ->
+ spawn_receiver(Tab,Storage,Cs,SenderPid,
+ TabSize,DetsData,Reason);
+ %% Protocol conversion hack
+ {copier_done, Node} ->
+ dbg_out("Sender of table ~p crashed on node ~p ~n", [Tab, Node]),
+ down(Tab, Storage)
+ end.
+
+
+table_init_fun(SenderPid) ->
+ PConv = mnesia_monitor:needs_protocol_conversion(node(SenderPid)),
+ MeMyselfAndI = self(),
+ fun(read) ->
+ Receiver =
+ if
+ PConv == true ->
+ MeMyselfAndI ! {actual_tabrec, self()},
+ MeMyselfAndI; %% Old mnesia
+ PConv == false -> self()
+ end,
+ SenderPid ! {Receiver, more},
+ get_data(SenderPid, Receiver)
+ end.
+
+
+%% Add_table_copy gets its own locks.
+spawn_receiver(Tab,Storage,Cs,SenderPid,TabSize,DetsData,{dumper,add_table_copy}) ->
+ Init = table_init_fun(SenderPid),
+ case do_init_table(Tab,Storage,Cs,SenderPid,TabSize,DetsData,self(), Init) of
+ Err = {error, _} ->
+ SenderPid ! {copier_done, node()},
+ Err;
+ Else ->
+ Else
+ end;
+
+spawn_receiver(Tab,Storage,Cs,SenderPid,
+ TabSize,DetsData,Reason) ->
+ %% Grab a schema lock to avoid deadlock between table_loader and schema_commit dumping.
+ %% Both may grab tables-locks in different order.
+ Load = fun() ->
+ {_,Tid,Ts} = get(mnesia_activity_state),
+ mnesia_locker:rlock(Tid, Ts#tidstore.store,
+ {schema, Tab}),
+ Init = table_init_fun(SenderPid),
+ Pid = spawn_link(?MODULE, spawned_receiver,
+ [self(),Tab,Storage,Cs,
+ SenderPid,TabSize,DetsData,
+ Init]),
+ put(mnesia_real_loader, Pid),
+ wait_on_load_complete(Pid)
+ end,
+ Res = case mnesia:transaction(Load, 20) of
+ {'atomic', {error,Result}} when element(1,Reason) == dumper ->
+ SenderPid ! {copier_done, node()},
+ {error,Result};
+ {'atomic', {error,Result}} ->
+ SenderPid ! {copier_done, node()},
+ fatal("Cannot create table ~p: ~p~n",
+ [[Tab, Storage], Result]);
+ {'atomic', Result} -> Result;
+ {aborted, nomore} ->
+ SenderPid ! {copier_done, node()},
+ restart;
+ {aborted, _ } ->
+ SenderPid ! {copier_done, node()},
+ down %% either this node or sender is dying
+ end,
+ unlink(whereis(mnesia_tm)), %% Avoid late unlink from tm
+ Res.
+
+spawned_receiver(ReplyTo,Tab,Storage,Cs,
+ SenderPid,TabSize,DetsData, Init) ->
+ process_flag(trap_exit, true),
+ Done = do_init_table(Tab,Storage,Cs,
+ SenderPid,TabSize,DetsData,
+ ReplyTo, Init),
+ ReplyTo ! {self(),Done},
+ unlink(ReplyTo),
+ unlink(whereis(mnesia_controller)),
+ exit(normal).
+
+wait_on_load_complete(Pid) ->
+ receive
+ {Pid, Res} ->
+ Res;
+ {'EXIT', Pid, Reason} ->
+ exit(Reason);
+ Else ->
+ Pid ! Else,
+ wait_on_load_complete(Pid)
+ end.
+
+tab_receiver(Node, Tab, Storage, Cs, PConv, OrigTabRec) ->
+ receive
+ {SenderPid, {no_more, DatBin}} when PConv == false ->
+ finish_copy(Storage,Tab,Cs,SenderPid,DatBin,OrigTabRec);
+
+ %% Protocol conversion hack
+ {SenderPid, {no_more, DatBin}} when pid(PConv) ->
+ PConv ! {SenderPid, no_more},
+ receive
+ {old_init_table_complete, ok} ->
+ finish_copy(Storage, Tab, Cs, SenderPid, DatBin,OrigTabRec);
+ {old_init_table_complete, Reason} ->
+ Msg = "OLD: [d]ets:init table failed",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage)
+ end;
+
+ {actual_tabrec, Pid} ->
+ tab_receiver(Node, Tab, Storage, Cs, Pid,OrigTabRec);
+
+ {SenderPid, {more, [Recs]}} when pid(PConv) ->
+ PConv ! {SenderPid, {more, Recs}}, %% Forward Msg to OldNodes
+ tab_receiver(Node, Tab, Storage, Cs, PConv,OrigTabRec);
+
+ {'EXIT', PConv, Reason} -> %% [d]ets:init process crashed
+ Msg = "Receiver crashed",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage);
+
+ %% Protocol conversion hack
+ {copier_done, Node} ->
+ dbg_out("Sender of table ~p crashed on node ~p ~n", [Tab, Node]),
+ down(Tab, Storage);
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ tab_receiver(Node, Tab, Storage, Cs, PConv,OrigTabRec)
+ end.
+
+create_table(Tab, TabSize, Storage, Cs) ->
+ if
+ Storage == disc_only_copies ->
+ mnesia_lib:lock_table(Tab),
+ Tmp = mnesia_lib:tab2tmp(Tab),
+ Size = lists:max([TabSize, 256]),
+ Args = [{file, Tmp},
+ {keypos, 2},
+%% {ram_file, true},
+ {estimated_no_objects, Size},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)}],
+ file:delete(Tmp),
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, _} ->
+ mnesia_lib:unlock_table(Tab),
+ {Storage, Tab};
+ Else ->
+ mnesia_lib:unlock_table(Tab),
+ Else
+ end;
+ (Storage == ram_copies) or (Storage == disc_copies) ->
+ Args = [{keypos, 2}, public, named_table, Cs#cstruct.type],
+ case mnesia_monitor:unsafe_mktab(Tab, Args) of
+ Tab ->
+ {Storage, Tab};
+ Else ->
+ Else
+ end
+ end.
+
+do_init_table(Tab,Storage,Cs,SenderPid,
+ TabSize,DetsInfo,OrigTabRec,Init) ->
+ case create_table(Tab, TabSize, Storage, Cs) of
+ {Storage,Tab} ->
+ %% Debug info
+ Node = node(SenderPid),
+ put(mnesia_table_receiver, {Tab, Node, SenderPid}),
+ mnesia_tm:block_tab(Tab),
+ PConv = mnesia_monitor:needs_protocol_conversion(Node),
+
+ case init_table(Tab,Storage,Init,PConv,DetsInfo,SenderPid) of
+ ok ->
+ tab_receiver(Node,Tab,Storage,Cs,PConv,OrigTabRec);
+ Reason ->
+ Msg = "[d]ets:init table failed",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage)
+ end;
+ Error ->
+ Error
+ end.
+
+make_table_fun(Pid, TabRec) ->
+ fun(close) ->
+ ok;
+ (read) ->
+ get_data(Pid, TabRec)
+ end.
+
+get_data(Pid, TabRec) ->
+ receive
+ {Pid, {more, Recs}} ->
+ Pid ! {TabRec, more},
+ {Recs, make_table_fun(Pid,TabRec)};
+ {Pid, no_more} ->
+ end_of_input;
+ {copier_done, Node} ->
+ case node(Pid) of
+ Node ->
+ {copier_done, Node};
+ _ ->
+ get_data(Pid, TabRec)
+ end;
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ get_data(Pid, TabRec)
+ end.
+
+init_table(Tab, disc_only_copies, Fun, false, DetsInfo,Sender) ->
+ ErtsVer = erlang:system_info(version),
+ case DetsInfo of
+ {ErtsVer, DetsData} ->
+ Res = (catch dets:is_compatible_bchunk_format(Tab, DetsData)),
+ case Res of
+ {'EXIT',{undef,[{dets,_,_}|_]}} ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ {'EXIT', What} ->
+ exit(What);
+ false ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ true ->
+ dets:init_table(Tab, Fun, [{format, bchunk}])
+ end;
+ Old when Old /= false ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ _ ->
+ dets:init_table(Tab, Fun)
+ end;
+init_table(Tab, _, Fun, false, _DetsInfo,_) ->
+ case catch ets:init_table(Tab, Fun) of
+ true ->
+ ok;
+ {'EXIT', Else} -> Else
+ end;
+init_table(Tab, Storage, Fun, true, _DetsInfo, Sender) -> %% Old Nodes
+ spawn_link(?MODULE, old_node_init_table,
+ [Tab, Storage, Fun, self(), false, Sender]),
+ ok.
+
+old_node_init_table(Tab, Storage, Fun, TabReceiver, DetsInfo,Sender) ->
+ Res = init_table(Tab, Storage, Fun, false, DetsInfo,Sender),
+ TabReceiver ! {old_init_table_complete, Res},
+ unlink(TabReceiver),
+ ok.
+
+finish_copy(Storage,Tab,Cs,SenderPid,DatBin,OrigTabRec) ->
+ TabRef = {Storage, Tab},
+ subscr_receiver(TabRef, Cs#cstruct.record_name),
+ case handle_last(TabRef, Cs#cstruct.type, DatBin) of
+ ok ->
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ %% OrigTabRec must not be the spawned tab-receiver
+ %% due to old protocol.
+ SenderPid ! {OrigTabRec, no_more},
+ mnesia_tm:unblock_tab(Tab),
+ ok;
+ {error, Reason} ->
+ Msg = "Failed to handle last",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage)
+ end.
+
+subscr_receiver(TabRef = {_, Tab}, RecName) ->
+ receive
+ {mnesia_table_event, {Op, Val, _Tid}} ->
+ if
+ Tab == RecName ->
+ handle_event(TabRef, Op, Val);
+ true ->
+ handle_event(TabRef, Op, setelement(1, Val, RecName))
+ end,
+ subscr_receiver(TabRef, RecName);
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ subscr_receiver(TabRef, RecName)
+ after 0 ->
+ ok
+ end.
+
+handle_event(TabRef, write, Rec) ->
+ db_put(TabRef, Rec);
+handle_event(TabRef, delete, {_Tab, Key}) ->
+ db_erase(TabRef, Key);
+handle_event(TabRef, delete_object, OldRec) ->
+ db_match_erase(TabRef, OldRec);
+handle_event(TabRef, clear_table, {_Tab, _Key}) ->
+ db_match_erase(TabRef, '_').
+
+handle_last({disc_copies, Tab}, _Type, nobin) ->
+ Ret = mnesia_log:ets2dcd(Tab),
+ Fname = mnesia_lib:tab2dat(Tab),
+ case mnesia_lib:exists(Fname) of
+ true -> %% Remove old .DAT files.
+ file:delete(Fname);
+ false ->
+ ok
+ end,
+ Ret;
+
+handle_last({disc_only_copies, Tab}, Type, nobin) ->
+ case mnesia_lib:swap_tmp_files([Tab]) of
+ [] ->
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ mnesia_monitor:open_dets(Tab, Args),
+ ok;
+ L when list(L) ->
+ {error, {"Cannot swap tmp files", Tab, L}}
+ end;
+
+handle_last({ram_copies, _Tab}, _Type, nobin) ->
+ ok;
+handle_last({ram_copies, Tab}, _Type, DatBin) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_lib:lock_table(Tab),
+ Tmp = mnesia_lib:tab2tmp(Tab),
+ ok = file:write_file(Tmp, DatBin),
+ ok = file:rename(Tmp, mnesia_lib:tab2dcd(Tab)),
+ mnesia_lib:unlock_table(Tab),
+ ok;
+ false ->
+ ok
+ end.
+
+down(Tab, Storage) ->
+ case Storage of
+ ram_copies ->
+ catch ?ets_delete_table(Tab);
+ disc_copies ->
+ catch ?ets_delete_table(Tab);
+ disc_only_copies ->
+ mnesia_lib:cleanup_tmp_files([Tab])
+ end,
+ mnesia_checkpoint:tm_del_copy(Tab, node()),
+ mnesia_controller:sync_del_table_copy_whereabouts(Tab, node()),
+ mnesia_tm:unblock_tab(Tab),
+ flush_subcrs(),
+ down.
+
+flush_subcrs() ->
+ receive
+ {mnesia_table_event, _} ->
+ flush_subcrs();
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ flush_subcrs()
+ after 0 ->
+ done
+ end.
+
+db_erase({ram_copies, Tab}, Key) ->
+ true = ?ets_delete(Tab, Key);
+db_erase({disc_copies, Tab}, Key) ->
+ true = ?ets_delete(Tab, Key);
+db_erase({disc_only_copies, Tab}, Key) ->
+ ok = dets:delete(Tab, Key).
+
+db_match_erase({ram_copies, Tab} , Pat) ->
+ true = ?ets_match_delete(Tab, Pat);
+db_match_erase({disc_copies, Tab} , Pat) ->
+ true = ?ets_match_delete(Tab, Pat);
+db_match_erase({disc_only_copies, Tab}, Pat) ->
+ ok = dets:match_delete(Tab, Pat).
+
+db_put({ram_copies, Tab}, Val) ->
+ true = ?ets_insert(Tab, Val);
+db_put({disc_copies, Tab}, Val) ->
+ true = ?ets_insert(Tab, Val);
+db_put({disc_only_copies, Tab}, Val) ->
+ ok = dets:insert(Tab, Val).
+
+%% This code executes at the remote site where the data resides and
+%% runs in a special copier process.
+
+calc_nokeys(Storage, Tab) ->
+ %% Calculate #keys per transfer
+ Key = mnesia_lib:db_first(Storage, Tab),
+ Recs = mnesia_lib:db_get(Storage, Tab, Key),
+ BinSize = size(term_to_binary(Recs)),
+ (?MAX_TRANSFER_SIZE div BinSize) + 1.
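+%% For example, if the records under the first key serialize to roughly 300
+%% bytes, this yields 7500 div 300 + 1 = 26 keys per transfer (the figures
+%% are illustrative only).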
+
+send_table(Pid, Tab, RemoteS) ->
+ case ?catch_val({Tab, storage_type}) of
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ unknown ->
+ {error, {no_exists, Tab}};
+ Storage ->
+ %% Send first
+ TabSize = mnesia:table_info(Tab, size),
+ Pconvert = mnesia_monitor:needs_protocol_conversion(node(Pid)),
+ KeysPerTransfer = calc_nokeys(Storage, Tab),
+ ChunkData = dets:info(Tab, bchunk_format),
+
+ UseDetsChunk =
+ Storage == RemoteS andalso
+ Storage == disc_only_copies andalso
+ ChunkData /= undefined andalso
+ Pconvert == false,
+ if
+ UseDetsChunk == true ->
+ DetsInfo = erlang:system_info(version),
+ Pid ! {self(), {first, TabSize, {DetsInfo, ChunkData}}};
+ true ->
+ Pid ! {self(), {first, TabSize}}
+ end,
+
+ %% Debug info
+ put(mnesia_table_sender, {Tab, node(Pid), Pid}),
+ {Init, Chunk} = reader_funcs(UseDetsChunk, Tab, Storage, KeysPerTransfer),
+
+ SendIt = fun() ->
+ prepare_copy(Pid, Tab, Storage),
+ send_more(Pid, 1, Chunk, Init(), Tab, Pconvert),
+ finish_copy(Pid, Tab, Storage, RemoteS)
+ end,
+
+ case catch SendIt() of
+ receiver_died ->
+ cleanup_tab_copier(Pid, Storage, Tab),
+ unlink(whereis(mnesia_tm)),
+ ok;
+ {_, receiver_died} ->
+ unlink(whereis(mnesia_tm)),
+ ok;
+ {'atomic', no_more} ->
+ unlink(whereis(mnesia_tm)),
+ ok;
+ Reason ->
+ cleanup_tab_copier(Pid, Storage, Tab),
+ unlink(whereis(mnesia_tm)),
+ {error, Reason}
+ end
+ end.
+
+prepare_copy(Pid, Tab, Storage) ->
+ Trans =
+ fun() ->
+ mnesia:write_lock_table(Tab),
+ mnesia_subscr:subscribe(Pid, {table, Tab}),
+ update_where_to_write(Tab, node(Pid)),
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+ ok
+ end,
+ case mnesia:transaction(Trans) of
+ {'atomic', ok} ->
+ ok;
+ {aborted, Reason} ->
+ exit({tab_copier_prepare, Tab, Reason})
+ end.
+
+update_where_to_write(Tab, Node) ->
+ case val({Tab, access_mode}) of
+ read_only ->
+ ignore;
+ read_write ->
+ Current = val({current, db_nodes}),
+ Ns =
+ case lists:member(Node, Current) of
+ true -> Current;
+ false -> [Node | Current]
+ end,
+ update_where_to_write(Ns, Tab, Node)
+ end.
+
+update_where_to_write([], _, _) ->
+ ok;
+update_where_to_write([H|T], Tab, AddNode) ->
+ rpc:call(H, mnesia_controller, call,
+ [{update_where_to_write, [add, Tab, AddNode], self()}]),
+ update_where_to_write(T, Tab, AddNode).
+
+send_more(Pid, N, Chunk, DataState, Tab, OldNode) ->
+ receive
+ {NewPid, more} ->
+ case send_packet(N - 1, NewPid, Chunk, DataState, OldNode) of
+ New when integer(New) ->
+ New - 1;
+ NewData ->
+ send_more(NewPid, ?MAX_NOPACKETS, Chunk, NewData, Tab, OldNode)
+ end;
+ {_NewPid, {old_protocol, Tab}} ->
+ Storage = val({Tab, storage_type}),
+ {Init, NewChunk} =
+ reader_funcs(false, Tab, Storage, calc_nokeys(Storage, Tab)),
+ send_more(Pid, 1, NewChunk, Init(), Tab, OldNode);
+
+ {copier_done, Node} when Node == node(Pid)->
+ verbose("Receiver of table ~p crashed on ~p (more)~n", [Tab, Node]),
+ throw(receiver_died)
+ end.
+
+reader_funcs(UseDetsChunk, Tab, Storage, KeysPerTransfer) ->
+ case UseDetsChunk of
+ false ->
+ {fun() -> mnesia_lib:db_init_chunk(Storage, Tab, KeysPerTransfer) end,
+ fun(Cont) -> mnesia_lib:db_chunk(Storage, Cont) end};
+ true ->
+ {fun() -> dets_bchunk(Tab, start) end,
+ fun(Cont) -> dets_bchunk(Tab, Cont) end}
+ end.
+
+dets_bchunk(Tab, Chunk) -> %% Arrg
+ case dets:bchunk(Tab, Chunk) of
+ {Cont, Data} -> {Data, Cont};
+ Else -> Else
+ end.
+
+send_packet(N, Pid, _Chunk, '$end_of_table', OldNode) ->
+ case OldNode of
+ true -> ignore; %% Old nodes can't handle the new no_more
+ false -> Pid ! {self(), no_more}
+ end,
+ N;
+send_packet(N, Pid, Chunk, {[], Cont}, OldNode) ->
+ send_packet(N, Pid, Chunk, Chunk(Cont), OldNode);
+send_packet(N, Pid, Chunk, {Recs, Cont}, OldNode) when N < ?MAX_NOPACKETS ->
+ case OldNode of
+        true -> Pid ! {self(), {more, [Recs]}}; %% Old nodes need a wrapping list
+ false -> Pid ! {self(), {more, Recs}}
+ end,
+ send_packet(N+1, Pid, Chunk, Chunk(Cont), OldNode);
+send_packet(_N, _Pid, _Chunk, DataState, _OldNode) ->
+ DataState.
+
+finish_copy(Pid, Tab, Storage, RemoteS) ->
+ RecNode = node(Pid),
+ DatBin = dat2bin(Tab, Storage, RemoteS),
+ Trans =
+ fun() ->
+ mnesia:read_lock_table(Tab),
+ A = val({Tab, access_mode}),
+ mnesia_controller:sync_and_block_table_whereabouts(Tab, RecNode, RemoteS, A),
+ cleanup_tab_copier(Pid, Storage, Tab),
+ mnesia_checkpoint:tm_add_copy(Tab, RecNode),
+ Pid ! {self(), {no_more, DatBin}},
+ receive
+ {Pid, no_more} -> % Dont bother about the spurious 'more' message
+ no_more;
+ {copier_done, Node} when Node == node(Pid)->
+ verbose("Tab receiver ~p crashed (more): ~p~n", [Tab, Node]),
+ receiver_died
+ end
+ end,
+ mnesia:transaction(Trans).
+
+cleanup_tab_copier(Pid, Storage, Tab) ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_subscr:unsubscribe(Pid, {table, Tab}).
+
+dat2bin(Tab, ram_copies, ram_copies) ->
+ mnesia_lib:lock_table(Tab),
+ Res = file:read_file(mnesia_lib:tab2dcd(Tab)),
+ mnesia_lib:unlock_table(Tab),
+ case Res of
+ {ok, DatBin} -> DatBin;
+ _ -> nobin
+ end;
+dat2bin(_Tab, _LocalS, _RemoteS) ->
+ nobin.
+
+handle_exit(Pid, Reason) when node(Pid) == node() ->
+ exit(Reason);
+handle_exit(_Pid, _Reason) -> %% Not from our node, this will be handled by
+ ignore. %% mnesia_down soon.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_locker.erl
new file mode 100644
index 0000000000..8fe08414d0
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_locker.erl
@@ -0,0 +1,1022 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_locker.erl,v 1.2 2009/07/01 15:45:40 kostis Exp $
+%%
+-module(mnesia_locker).
+
+-export([
+ get_held_locks/0,
+ get_lock_queue/0,
+ global_lock/5,
+ ixrlock/5,
+ init/1,
+ mnesia_down/2,
+ release_tid/1,
+ async_release_tid/2,
+ send_release_tid/2,
+ receive_release_tid_acc/2,
+ rlock/3,
+ rlock_table/3,
+ rwlock/3,
+ sticky_rwlock/3,
+ start/0,
+ sticky_wlock/3,
+ sticky_wlock_table/3,
+ wlock/3,
+ wlock_no_exist/4,
+ wlock_table/3
+ ]).
+
+%% sys callback functions
+-export([system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [dbg_out/2, error/2, verbose/2]).
+
+-define(dbg(S,V), ok).
+%-define(dbg(S,V), dbg_out("~p:~p: " ++ S, [?MODULE, ?LINE] ++ V)).
+
+-define(ALL, '______WHOLETABLE_____').
+-define(STICK, '______STICK_____').
+-define(GLOBAL, '______GLOBAL_____').
+
+-record(state, {supervisor}).
+
+-record(queue, {oid, tid, op, pid, lucky}).
+
+%% mnesia_held_locks: contain {Oid, Op, Tid} entries (bag)
+-define(match_oid_held_locks(Oid), {Oid, '_', '_'}).
+%% mnesia_tid_locks: contain {Tid, Oid, Op} entries (bag)
+-define(match_oid_tid_locks(Tid), {Tid, '_', '_'}).
+%% mnesia_sticky_locks: contain {Oid, Node} entries and {Tab, Node} entries (set)
+-define(match_oid_sticky_locks(Oid),{Oid, '_'}).
+%% mnesia_lock_queue: contain {queue, Oid, Tid, Op, ReplyTo, WaitForTid} entries (ordered_set)
+-define(match_oid_lock_queue(Oid), #queue{oid=Oid, tid='_', op = '_', pid = '_', lucky = '_'}).
+%% mnesia_lock_counter: {{write, Tab}, Number} &&
+%% {{read, Tab}, Number} entries (set)
+
+start() ->
+ mnesia_monitor:start_proc(?MODULE, ?MODULE, init, [self()]).
+
+init(Parent) ->
+ register(?MODULE, self()),
+ process_flag(trap_exit, true),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ loop(#state{supervisor = Parent}).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+reply(From, R) ->
+ From ! {?MODULE, node(), R}.
+
+l_request(Node, X, Store) ->
+ {?MODULE, Node} ! {self(), X},
+ l_req_rec(Node, Store).
+
+l_req_rec(Node, Store) ->
+ ?ets_insert(Store, {nodes, Node}),
+ receive
+ {?MODULE, Node, {switch, Node2, Req}} ->
+ ?ets_insert(Store, {nodes, Node2}),
+ {?MODULE, Node2} ! Req,
+ {switch, Node2, Req};
+ {?MODULE, Node, Reply} ->
+ Reply;
+ {mnesia_down, Node} ->
+ {not_granted, {node_not_running, Node}}
+ end.
+
+release_tid(Tid) ->
+ ?MODULE ! {release_tid, Tid}.
+
+async_release_tid(Nodes, Tid) ->
+ rpc:abcast(Nodes, ?MODULE, {release_tid, Tid}).
+
+send_release_tid(Nodes, Tid) ->
+ rpc:abcast(Nodes, ?MODULE, {self(), {sync_release_tid, Tid}}).
+
+receive_release_tid_acc([Node | Nodes], Tid) ->
+ receive
+ {?MODULE, Node, {tid_released, Tid}} ->
+ receive_release_tid_acc(Nodes, Tid);
+ {mnesia_down, Node} ->
+ receive_release_tid_acc(Nodes, Tid)
+ end;
+receive_release_tid_acc([], _Tid) ->
+ ok.
+
+loop(State) ->
+ receive
+ {From, {write, Tid, Oid}} ->
+ try_sticky_lock(Tid, write, From, Oid),
+ loop(State);
+
+ %% If Key == ?ALL it's a request to lock the entire table
+ %%
+
+ {From, {read, Tid, Oid}} ->
+ try_sticky_lock(Tid, read, From, Oid),
+ loop(State);
+
+ %% Really do a read, but get hold of a write lock
+ %% used by mnesia:wread(Oid).
+
+ {From, {read_write, Tid, Oid}} ->
+ try_sticky_lock(Tid, read_write, From, Oid),
+ loop(State);
+
+ %% Tid has somehow terminated, clear up everything
+ %% and pass locks on to queued processes.
+ %% This is the purpose of the mnesia_tid_locks table
+
+ {release_tid, Tid} ->
+ do_release_tid(Tid),
+ loop(State);
+
+	%% Sticky lock: first try this at the where_to_read node
+ {From, {test_set_sticky, Tid, {Tab, _} = Oid, Lock}} ->
+ case ?ets_lookup(mnesia_sticky_locks, Tab) of
+ [] ->
+ reply(From, not_stuck),
+ loop(State);
+ [{_,Node}] when Node == node() ->
+ %% Lock is stuck here, see now if we can just set
+ %% a regular write lock
+ try_lock(Tid, Lock, From, Oid),
+ loop(State);
+ [{_,Node}] ->
+ reply(From, {stuck_elsewhere, Node}),
+ loop(State)
+ end;
+
+ %% If test_set_sticky fails, we send this to all nodes
+	%% after acquiring a real write lock on Oid
+
+ {stick, {Tab, _}, N} ->
+ ?ets_insert(mnesia_sticky_locks, {Tab, N}),
+ loop(State);
+
+	%% The caller that sends this message must first have
+	%% acquired a write lock on the entire table
+ {unstick, Tab} ->
+ ?ets_delete(mnesia_sticky_locks, Tab),
+ loop(State);
+
+ {From, {ix_read, Tid, Tab, IxKey, Pos}} ->
+ case catch mnesia_index:get_index_table(Tab, Pos) of
+ {'EXIT', _} ->
+ reply(From, {not_granted, {no_exists, Tab, {index, [Pos]}}}),
+ loop(State);
+ Index ->
+ Rk = mnesia_lib:elems(2,mnesia_index:db_get(Index, IxKey)),
+ %% list of real keys
+ case ?ets_lookup(mnesia_sticky_locks, Tab) of
+ [] ->
+ set_read_lock_on_all_keys(Tid, From,Tab,Rk,Rk,
+ []),
+ loop(State);
+ [{_,N}] when N == node() ->
+ set_read_lock_on_all_keys(Tid, From,Tab,Rk,Rk,
+ []),
+ loop(State);
+ [{_,N}] ->
+ Req = {From, {ix_read, Tid, Tab, IxKey, Pos}},
+ From ! {?MODULE, node(), {switch, N, Req}},
+ loop(State)
+ end
+ end;
+
+ {From, {sync_release_tid, Tid}} ->
+ do_release_tid(Tid),
+ reply(From, {tid_released, Tid}),
+ loop(State);
+
+ {release_remote_non_pending, Node, Pending} ->
+ release_remote_non_pending(Node, Pending),
+ mnesia_monitor:mnesia_down(?MODULE, Node),
+ loop(State);
+
+ {'EXIT', Pid, _} when Pid == State#state.supervisor ->
+ do_stop();
+
+ {system, From, Msg} ->
+ verbose("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ Parent = State#state.supervisor,
+ sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
+
+ Msg ->
+ error("~p got unexpected message: ~p~n", [?MODULE, Msg]),
+ loop(State)
+ end.
+
+set_lock(Tid, Oid, Op) ->
+ ?dbg("Granted ~p ~p ~p~n", [Tid,Oid,Op]),
+ ?ets_insert(mnesia_held_locks, {Oid, Op, Tid}),
+ ?ets_insert(mnesia_tid_locks, {Tid, Oid, Op}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Acquire locks
+
+try_sticky_lock(Tid, Op, Pid, {Tab, _} = Oid) ->
+ case ?ets_lookup(mnesia_sticky_locks, Tab) of
+ [] ->
+ try_lock(Tid, Op, Pid, Oid);
+ [{_,N}] when N == node() ->
+ try_lock(Tid, Op, Pid, Oid);
+ [{_,N}] ->
+ Req = {Pid, {Op, Tid, Oid}},
+ Pid ! {?MODULE, node(), {switch, N, Req}}
+ end.
+
+try_lock(Tid, read_write, Pid, Oid) ->
+ try_lock(Tid, read_write, read, write, Pid, Oid);
+try_lock(Tid, Op, Pid, Oid) ->
+ try_lock(Tid, Op, Op, Op, Pid, Oid).
+
+try_lock(Tid, Op, SimpleOp, Lock, Pid, Oid) ->
+ case can_lock(Tid, Lock, Oid, {no, bad_luck}) of
+ yes ->
+ Reply = grant_lock(Tid, SimpleOp, Lock, Oid),
+ reply(Pid, Reply);
+ {no, Lucky} ->
+ C = #cyclic{op = SimpleOp, lock = Lock, oid = Oid, lucky = Lucky},
+ ?dbg("Rejected ~p ~p ~p ~p ~n", [Tid, Oid, Lock, Lucky]),
+ reply(Pid, {not_granted, C});
+ {queue, Lucky} ->
+ ?dbg("Queued ~p ~p ~p ~p ~n", [Tid, Oid, Lock, Lucky]),
+ %% Append to queue: Nice place for trace output
+ ?ets_insert(mnesia_lock_queue,
+ #queue{oid = Oid, tid = Tid, op = Op,
+ pid = Pid, lucky = Lucky}),
+ ?ets_insert(mnesia_tid_locks, {Tid, Oid, {queued, Op}})
+ end.
+
+grant_lock(Tid, read, Lock, {Tab, Key})
+ when Key /= ?ALL, Tab /= ?GLOBAL ->
+ case node(Tid#tid.pid) == node() of
+ true ->
+ set_lock(Tid, {Tab, Key}, Lock),
+ {granted, lookup_in_client};
+ false ->
+ case catch mnesia_lib:db_get(Tab, Key) of %% lookup as well
+ {'EXIT', _Reason} ->
+ %% Table has been deleted from this node,
+ %% restart the transaction.
+ C = #cyclic{op = read, lock = Lock, oid = {Tab, Key},
+ lucky = nowhere},
+ {not_granted, C};
+ Val ->
+ set_lock(Tid, {Tab, Key}, Lock),
+ {granted, Val}
+ end
+ end;
+grant_lock(Tid, read, Lock, Oid) ->
+ set_lock(Tid, Oid, Lock),
+ {granted, ok};
+grant_lock(Tid, write, Lock, Oid) ->
+ set_lock(Tid, Oid, Lock),
+ granted.
+
+%% 1) Impose an ordering on all transactions that favours old (low tid)
+%%    transactions; newer (higher tid) transactions may never wait on older ones.
+%% 2) When releasing tids from the queue, always begin with the youngest (high tid);
+%%    because of 1) this will avoid the deadlocks.
+%% 3) Table locks are the problem :-) They should neither starve nor deadlock;
+%%    handle table locks in the queue as if they held locks on unlocked records.
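+%%
+%% Illustrative sketch of rule 1) (not part of the original code): a
+%% requesting transaction may only be queued behind a holder that is
+%% younger (has a higher tid); otherwise the request is rejected:
+%%
+%%   may_wait_for(Tid, HolderTid) when HolderTid > Tid -> {queue, HolderTid};
+%%   may_wait_for(_Tid, HolderTid) -> {no, HolderTid}.
+%%
+%% check_lock/7 below implements this, plus the table lock special cases.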
+
+can_lock(Tid, read, {Tab, Key}, AlreadyQ) when Key /= ?ALL ->
+ %% The key is bound, no need for the other BIF
+ Oid = {Tab, Key},
+ ObjLocks = ?ets_match_object(mnesia_held_locks, {Oid, write, '_'}),
+ TabLocks = ?ets_match_object(mnesia_held_locks, {{Tab, ?ALL}, write, '_'}),
+ check_lock(Tid, Oid, ObjLocks, TabLocks, yes, AlreadyQ, read);
+
+can_lock(Tid, read, Oid, AlreadyQ) -> % Whole tab
+ Tab = element(1, Oid),
+ ObjLocks = ?ets_match_object(mnesia_held_locks, {{Tab, '_'}, write, '_'}),
+ check_lock(Tid, Oid, ObjLocks, [], yes, AlreadyQ, read);
+
+can_lock(Tid, write, {Tab, Key}, AlreadyQ) when Key /= ?ALL ->
+ Oid = {Tab, Key},
+ ObjLocks = ?ets_lookup(mnesia_held_locks, Oid),
+ TabLocks = ?ets_lookup(mnesia_held_locks, {Tab, ?ALL}),
+ check_lock(Tid, Oid, ObjLocks, TabLocks, yes, AlreadyQ, write);
+
+can_lock(Tid, write, Oid, AlreadyQ) -> % Whole tab
+ Tab = element(1, Oid),
+ ObjLocks = ?ets_match_object(mnesia_held_locks, ?match_oid_held_locks({Tab, '_'})),
+ check_lock(Tid, Oid, ObjLocks, [], yes, AlreadyQ, write).
+
+%% Check held locks for conflicting locks
+check_lock(Tid, Oid, [Lock | Locks], TabLocks, X, AlreadyQ, Type) ->
+ case element(3, Lock) of
+ Tid ->
+ check_lock(Tid, Oid, Locks, TabLocks, X, AlreadyQ, Type);
+ WaitForTid when WaitForTid > Tid -> % Important order
+ check_lock(Tid, Oid, Locks, TabLocks, {queue, WaitForTid}, AlreadyQ, Type);
+ WaitForTid when Tid#tid.pid == WaitForTid#tid.pid ->
+ dbg_out("Spurious lock conflict ~w ~w: ~w -> ~w~n",
+ [Oid, Lock, Tid, WaitForTid]),
+%% check_lock(Tid, Oid, Locks, TabLocks, {queue, WaitForTid}, AlreadyQ);
+ %% BUGBUG Fix this if possible
+ {no, WaitForTid};
+ WaitForTid ->
+ {no, WaitForTid}
+ end;
+
+check_lock(_, _, [], [], X, {queue, bad_luck}, _) ->
+ X; %% The queue should be correct already no need to check it again
+
+check_lock(_, _, [], [], X = {queue, _Tid}, _AlreadyQ, _) ->
+ X;
+
+check_lock(Tid, Oid, [], [], X, AlreadyQ, Type) ->
+ {Tab, Key} = Oid,
+ if
+ Type == write ->
+ check_queue(Tid, Tab, X, AlreadyQ);
+ Key == ?ALL ->
+ %% hmm should be solvable by a clever select expr but not today...
+ check_queue(Tid, Tab, X, AlreadyQ);
+ true ->
+ %% If there is a queue on that object, read_lock shouldn't be granted
+ ObjLocks = ets:lookup(mnesia_lock_queue, Oid),
+ Greatest = max(ObjLocks),
+ case Greatest of
+ empty ->
+ check_queue(Tid, Tab, X, AlreadyQ);
+ ObjL when Tid > ObjL ->
+ {no, ObjL}; %% Starvation Preemption (write waits for read)
+ ObjL ->
+ check_queue(Tid, Tab, {queue, ObjL}, AlreadyQ)
+ end
+ end;
+
+check_lock(Tid, Oid, [], TabLocks, X, AlreadyQ, Type) ->
+ check_lock(Tid, Oid, TabLocks, [], X, AlreadyQ, Type).
+
+%% Check queue for conflicting locks
+%% Assume that all queued locks belong to other tids
+
+check_queue(Tid, Tab, X, AlreadyQ) ->
+ TabLocks = ets:lookup(mnesia_lock_queue, {Tab,?ALL}),
+ Greatest = max(TabLocks),
+ case Greatest of
+ empty ->
+ X;
+ Tid ->
+ X;
+ WaitForTid when WaitForTid#queue.tid > Tid -> % Important order
+ {queue, WaitForTid};
+ WaitForTid ->
+ case AlreadyQ of
+ {no, bad_luck} -> {no, WaitForTid};
+ _ ->
+ erlang:error({mnesia_locker, assert, AlreadyQ})
+ end
+ end.
+
+max([]) ->
+ empty;
+max([H|R]) ->
+ max(R, H#queue.tid).
+
+max([H|R], Tid) when H#queue.tid > Tid ->
+ max(R, H#queue.tid);
+max([_|R], Tid) ->
+ max(R, Tid);
+max([], Tid) ->
+ Tid.
+
+%% We can't queue the ixlock requests since it
+%% becomes too complicated for little me :-)
+%% If we encounter an object with a wlock we reject the
+%% entire lock request
+%%
+%% BUGBUG: this is actually a bug since we may starve
+
+set_read_lock_on_all_keys(Tid, From, Tab, [RealKey | Tail], Orig, Ack) ->
+ Oid = {Tab, RealKey},
+ case can_lock(Tid, read, Oid, {no, bad_luck}) of
+ yes ->
+ {granted, Val} = grant_lock(Tid, read, read, Oid),
+ case opt_lookup_in_client(Val, Oid, read) of % Ought to be invoked
+ C when record(C, cyclic) -> % in the client
+ reply(From, {not_granted, C});
+ Val2 ->
+ Ack2 = lists:append(Val2, Ack),
+ set_read_lock_on_all_keys(Tid, From, Tab, Tail, Orig, Ack2)
+ end;
+ {no, Lucky} ->
+ C = #cyclic{op = read, lock = read, oid = Oid, lucky = Lucky},
+ reply(From, {not_granted, C});
+ {queue, Lucky} ->
+ C = #cyclic{op = read, lock = read, oid = Oid, lucky = Lucky},
+ reply(From, {not_granted, C})
+ end;
+set_read_lock_on_all_keys(_Tid, From, _Tab, [], Orig, Ack) ->
+ reply(From, {granted, Ack, Orig}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Release of locks
+
+%% Release remote non-pending nodes
+release_remote_non_pending(Node, Pending) ->
+ %% Clear the mnesia_sticky_locks table first, to avoid
+ %% unnecessary requests to the failing node
+ ?ets_match_delete(mnesia_sticky_locks, {'_' , Node}),
+
+ %% Then we have to release all locks held by processes
+ %% running at the failed node and also simply remove all
+    %% queued requests from the failed node
+
+ AllTids = ?ets_match(mnesia_tid_locks, {'$1', '_', '_'}),
+ Tids = [T || [T] <- AllTids, Node == node(T#tid.pid), not lists:member(T, Pending)],
+ do_release_tids(Tids).
+
+do_release_tids([Tid | Tids]) ->
+ do_release_tid(Tid),
+ do_release_tids(Tids);
+do_release_tids([]) ->
+ ok.
+
+do_release_tid(Tid) ->
+ Locks = ?ets_lookup(mnesia_tid_locks, Tid),
+ ?dbg("Release ~p ~p ~n", [Tid, Locks]),
+ ?ets_delete(mnesia_tid_locks, Tid),
+ release_locks(Locks),
+    %% Wake up any queued requests for the oids that just had locks released
+ UniqueLocks = keyunique(lists:sort(Locks),[]),
+ rearrange_queue(UniqueLocks).
+
+keyunique([{_Tid, Oid, _Op}|R], Acc = [{_, Oid, _}|_]) ->
+ keyunique(R, Acc);
+keyunique([H|R], Acc) ->
+ keyunique(R, [H|Acc]);
+keyunique([], Acc) ->
+ Acc.
+
+release_locks([Lock | Locks]) ->
+ release_lock(Lock),
+ release_locks(Locks);
+release_locks([]) ->
+ ok.
+
+release_lock({Tid, Oid, {queued, _}}) ->
+ ?ets_match_delete(mnesia_lock_queue,
+ #queue{oid=Oid, tid = Tid, op = '_',
+ pid = '_', lucky = '_'});
+release_lock({Tid, Oid, Op}) ->
+ if
+ Op == write ->
+ ?ets_delete(mnesia_held_locks, Oid);
+ Op == read ->
+ ?ets_match_delete(mnesia_held_locks, {Oid, Op, Tid})
+ end.
+
+rearrange_queue([{_Tid, {Tab, Key}, _} | Locks]) ->
+ if
+ Key /= ?ALL->
+ Queue =
+ ets:lookup(mnesia_lock_queue, {Tab, ?ALL}) ++
+ ets:lookup(mnesia_lock_queue, {Tab, Key}),
+ case Queue of
+ [] ->
+ ok;
+ _ ->
+ Sorted = lists:reverse(lists:keysort(#queue.tid, Queue)),
+ try_waiters_obj(Sorted)
+ end;
+ true ->
+ Pat = ?match_oid_lock_queue({Tab, '_'}),
+ Queue = ?ets_match_object(mnesia_lock_queue, Pat),
+ Sorted = lists:reverse(lists:keysort(#queue.tid, Queue)),
+ try_waiters_tab(Sorted)
+ end,
+ ?dbg("RearrQ ~p~n", [Queue]),
+ rearrange_queue(Locks);
+rearrange_queue([]) ->
+ ok.
+
+try_waiters_obj([W | Waiters]) ->
+ case try_waiter(W) of
+ queued ->
+ no;
+ _ ->
+ try_waiters_obj(Waiters)
+ end;
+try_waiters_obj([]) ->
+ ok.
+
+try_waiters_tab([W | Waiters]) ->
+ case W#queue.oid of
+ {_Tab, ?ALL} ->
+ case try_waiter(W) of
+ queued ->
+ no;
+ _ ->
+ try_waiters_tab(Waiters)
+ end;
+ Oid ->
+ case try_waiter(W) of
+ queued ->
+ Rest = key_delete_all(Oid, #queue.oid, Waiters),
+ try_waiters_tab(Rest);
+ _ ->
+ try_waiters_tab(Waiters)
+ end
+ end;
+try_waiters_tab([]) ->
+ ok.
+
+try_waiter({queue, Oid, Tid, read_write, ReplyTo, _}) ->
+ try_waiter(Oid, read_write, read, write, ReplyTo, Tid);
+try_waiter({queue, Oid, Tid, Op, ReplyTo, _}) ->
+ try_waiter(Oid, Op, Op, Op, ReplyTo, Tid).
+
+try_waiter(Oid, Op, SimpleOp, Lock, ReplyTo, Tid) ->
+ case can_lock(Tid, Lock, Oid, {queue, bad_luck}) of
+ yes ->
+ %% Delete from queue: Nice place for trace output
+ ?ets_match_delete(mnesia_lock_queue,
+ #queue{oid=Oid, tid = Tid, op = Op,
+ pid = ReplyTo, lucky = '_'}),
+ Reply = grant_lock(Tid, SimpleOp, Lock, Oid),
+ ReplyTo ! {?MODULE, node(), Reply},
+ locked;
+ {queue, _Why} ->
+ ?dbg("Keep ~p ~p ~p ~p~n", [Tid, Oid, Lock, _Why]),
+ queued; % Keep waiter in queue
+ {no, Lucky} ->
+ C = #cyclic{op = SimpleOp, lock = Lock, oid = Oid, lucky = Lucky},
+ verbose("** WARNING ** Restarted transaction, possible deadlock in lock queue ~w: cyclic = ~w~n",
+ [Tid, C]),
+ ?ets_match_delete(mnesia_lock_queue,
+ #queue{oid=Oid, tid = Tid, op = Op,
+ pid = ReplyTo, lucky = '_'}),
+ Reply = {not_granted, C},
+ ReplyTo ! {?MODULE, node(), Reply},
+ removed
+ end.
+
+key_delete_all(Key, Pos, TupleList) ->
+ key_delete_all(Key, Pos, TupleList, []).
+key_delete_all(Key, Pos, [H|T], Ack) when element(Pos, H) == Key ->
+ key_delete_all(Key, Pos, T, Ack);
+key_delete_all(Key, Pos, [H|T], Ack) ->
+ key_delete_all(Key, Pos, T, [H|Ack]);
+key_delete_all(_, _, [], Ack) ->
+ lists:reverse(Ack).
+
+
+%% ********************* end server code ********************
+%% The following code executes at the client side of a transaction
+
+mnesia_down(N, Pending) ->
+ case whereis(?MODULE) of
+ undefined ->
+ %% Takes care of mnesia_down's in early startup
+ mnesia_monitor:mnesia_down(?MODULE, N);
+ Pid ->
+	    %% A synchronous call is needed in order to avoid a
+ %% race with mnesia_tm's coordinator processes
+ %% that may restart and acquire new locks.
+ %% mnesia_monitor ensures the sync.
+ Pid ! {release_remote_non_pending, N, Pending}
+ end.
+
+%% Acquire a write lock, but do a read, used by
+%% mnesia:wread/1
+
+rwlock(Tid, Store, Oid) ->
+ {Tab, Key} = Oid,
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ mnesia:abort({no_exists, Tab});
+ Node ->
+ Lock = write,
+ case need_lock(Store, Tab, Key, Lock) of
+ yes ->
+ Ns = w_nodes(Tab),
+ Res = get_rwlocks_on_nodes(Ns, Ns, Node, Store, Tid, Oid),
+ ?ets_insert(Store, {{locks, Tab, Key}, Lock}),
+ Res;
+ no ->
+ if
+ Key == ?ALL ->
+ w_nodes(Tab);
+ Tab == ?GLOBAL ->
+ w_nodes(Tab);
+ true ->
+ dirty_rpc(Node, Tab, Key, Lock)
+ end
+ end
+ end.
+
+get_rwlocks_on_nodes([Node | Tail], Orig, Node, Store, Tid, Oid) ->
+ Op = {self(), {read_write, Tid, Oid}},
+ {?MODULE, Node} ! Op,
+ ?ets_insert(Store, {nodes, Node}),
+ add_debug(Node),
+ get_rwlocks_on_nodes(Tail, Orig, Node, Store, Tid, Oid);
+get_rwlocks_on_nodes([Node | Tail], Orig, OtherNode, Store, Tid, Oid) ->
+ Op = {self(), {write, Tid, Oid}},
+ {?MODULE, Node} ! Op,
+ add_debug(Node),
+ ?ets_insert(Store, {nodes, Node}),
+ get_rwlocks_on_nodes(Tail, Orig, OtherNode, Store, Tid, Oid);
+get_rwlocks_on_nodes([], Orig, _Node, Store, _Tid, Oid) ->
+ receive_wlocks(Orig, read_write_lock, Store, Oid).
+
+%% Return a list of nodes or abort the transaction.
+%% We also insert any additional where_to_write nodes
+%% in the local store under the key == nodes
+
+w_nodes(Tab) ->
+ Nodes = ?catch_val({Tab, where_to_write}),
+ case Nodes of
+ [_ | _] -> Nodes;
+ _ -> mnesia:abort({no_exists, Tab})
+ end.
+
+%% Acquire a sticky wlock. A sticky lock is a lock
+%% which remains at this node after the termination of the
+%% transaction.
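+%%
+%% Illustration (hypothetical table): once a sticky write lock on table
+%% person has stuck at this node, {person, node()} is kept in
+%% mnesia_sticky_locks, and further sticky write requests for person are
+%% resolved by the local lock server (via dirty_sticky_lock/4) without
+%% contacting the other replicas, until some node sends {unstick, person}.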
+
+sticky_wlock(Tid, Store, Oid) ->
+ sticky_lock(Tid, Store, Oid, write).
+
+sticky_rwlock(Tid, Store, Oid) ->
+ sticky_lock(Tid, Store, Oid, read_write).
+
+sticky_lock(Tid, Store, {Tab, Key} = Oid, Lock) ->
+ N = val({Tab, where_to_read}),
+ if
+ node() == N ->
+ case need_lock(Store, Tab, Key, write) of
+ yes ->
+ do_sticky_lock(Tid, Store, Oid, Lock);
+ no ->
+ dirty_sticky_lock(Tab, Key, [N], Lock)
+ end;
+ true ->
+ mnesia:abort({not_local, Tab})
+ end.
+
+do_sticky_lock(Tid, Store, {Tab, Key} = Oid, Lock) ->
+ ?MODULE ! {self(), {test_set_sticky, Tid, Oid, Lock}},
+ receive
+ {?MODULE, _N, granted} ->
+ ?ets_insert(Store, {{locks, Tab, Key}, write}),
+ granted;
+ {?MODULE, _N, {granted, Val}} -> %% for rwlocks
+ case opt_lookup_in_client(Val, Oid, write) of
+ C when record(C, cyclic) ->
+ exit({aborted, C});
+ Val2 ->
+ ?ets_insert(Store, {{locks, Tab, Key}, write}),
+ Val2
+ end;
+ {?MODULE, _N, {not_granted, Reason}} ->
+ exit({aborted, Reason});
+ {?MODULE, N, not_stuck} ->
+ not_stuck(Tid, Store, Tab, Key, Oid, Lock, N),
+ dirty_sticky_lock(Tab, Key, [N], Lock);
+ {mnesia_down, N} ->
+ exit({aborted, {node_not_running, N}});
+ {?MODULE, N, {stuck_elsewhere, _N2}} ->
+ stuck_elsewhere(Tid, Store, Tab, Key, Oid, Lock),
+ dirty_sticky_lock(Tab, Key, [N], Lock)
+ end.
+
+not_stuck(Tid, Store, Tab, _Key, Oid, _Lock, N) ->
+ rlock(Tid, Store, {Tab, ?ALL}), %% needed?
+ wlock(Tid, Store, Oid), %% perfect sync
+ wlock(Tid, Store, {Tab, ?STICK}), %% max one sticker/table
+ Ns = val({Tab, where_to_write}),
+ rpc:abcast(Ns, ?MODULE, {stick, Oid, N}).
+
+stuck_elsewhere(Tid, Store, Tab, _Key, Oid, _Lock) ->
+ rlock(Tid, Store, {Tab, ?ALL}), %% needed?
+ wlock(Tid, Store, Oid), %% perfect sync
+ wlock(Tid, Store, {Tab, ?STICK}), %% max one sticker/table
+ Ns = val({Tab, where_to_write}),
+ rpc:abcast(Ns, ?MODULE, {unstick, Tab}).
+
+dirty_sticky_lock(Tab, Key, Nodes, Lock) ->
+ if
+ Lock == read_write ->
+ mnesia_lib:db_get(Tab, Key);
+ Key == ?ALL ->
+ Nodes;
+ Tab == ?GLOBAL ->
+ Nodes;
+ true ->
+ ok
+ end.
+
+sticky_wlock_table(Tid, Store, Tab) ->
+ sticky_lock(Tid, Store, {Tab, ?ALL}, write).
+
+%% Acquire a wlock on Oid.
+%% We store a {Tabname, write, Tid} in all locktables
+%% on all nodes containing a copy of Tabname.
+%% We also store an item {{locks, Tab, Key}, write} in the
+%% local store when we have acquired the lock.
+%%
+wlock(Tid, Store, Oid) ->
+ {Tab, Key} = Oid,
+ case need_lock(Store, Tab, Key, write) of
+ yes ->
+ Ns = w_nodes(Tab),
+ Op = {self(), {write, Tid, Oid}},
+ ?ets_insert(Store, {{locks, Tab, Key}, write}),
+ get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid);
+ no when Key /= ?ALL, Tab /= ?GLOBAL ->
+ [];
+ no ->
+ w_nodes(Tab)
+ end.
+
+wlock_table(Tid, Store, Tab) ->
+ wlock(Tid, Store, {Tab, ?ALL}).
+
+%% Write lock even if the table does not exist
+
+wlock_no_exist(Tid, Store, Tab, Ns) ->
+ Oid = {Tab, ?ALL},
+ Op = {self(), {write, Tid, Oid}},
+ get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid).
+
+need_lock(Store, Tab, Key, LockPattern) ->
+ TabL = ?ets_match_object(Store, {{locks, Tab, ?ALL}, LockPattern}),
+ if
+ TabL == [] ->
+ KeyL = ?ets_match_object(Store, {{locks, Tab, Key}, LockPattern}),
+ if
+ KeyL == [] ->
+ yes;
+ true ->
+ no
+ end;
+ true ->
+ no
+ end.
+
+add_debug(Node) -> % Use process dictionary for debug info
+ case get(mnesia_wlock_nodes) of
+ undefined ->
+ put(mnesia_wlock_nodes, [Node]);
+ NodeList ->
+ put(mnesia_wlock_nodes, [Node|NodeList])
+ end.
+
+del_debug(Node) ->
+ case get(mnesia_wlock_nodes) of
+ undefined -> % Shouldn't happen
+ ignore;
+ [Node] ->
+ erase(mnesia_wlock_nodes);
+ List ->
+ put(mnesia_wlock_nodes, lists:delete(Node, List))
+ end.
+
+%% We first send lock requests to the lockmanagers on all
+%% nodes holding a copy of the table
+
+get_wlocks_on_nodes([Node | Tail], Orig, Store, Request, Oid) ->
+ {?MODULE, Node} ! Request,
+ ?ets_insert(Store, {nodes, Node}),
+ add_debug(Node),
+ get_wlocks_on_nodes(Tail, Orig, Store, Request, Oid);
+get_wlocks_on_nodes([], Orig, Store, _Request, Oid) ->
+ receive_wlocks(Orig, Orig, Store, Oid).
+
+receive_wlocks([Node | Tail], Res, Store, Oid) ->
+ receive
+ {?MODULE, Node, granted} ->
+ del_debug(Node),
+ receive_wlocks(Tail, Res, Store, Oid);
+ {?MODULE, Node, {granted, Val}} -> %% for rwlocks
+ del_debug(Node),
+ case opt_lookup_in_client(Val, Oid, write) of
+ C when record(C, cyclic) ->
+ flush_remaining(Tail, Node, {aborted, C});
+ Val2 ->
+ receive_wlocks(Tail, Val2, Store, Oid)
+ end;
+ {?MODULE, Node, {not_granted, Reason}} ->
+ del_debug(Node),
+ Reason1 = {aborted, Reason},
+ flush_remaining(Tail, Node, Reason1);
+ {mnesia_down, Node} ->
+ del_debug(Node),
+ Reason1 = {aborted, {node_not_running, Node}},
+ flush_remaining(Tail, Node, Reason1);
+ {?MODULE, Node, {switch, Node2, Req}} -> %% for rwlocks
+ del_debug(Node),
+ add_debug(Node2),
+ ?ets_insert(Store, {nodes, Node2}),
+ {?MODULE, Node2} ! Req,
+ receive_wlocks([Node2 | Tail], Res, Store, Oid)
+ end;
+
+receive_wlocks([], Res, _Store, _Oid) ->
+ Res.
+
+flush_remaining([], _SkipNode, Res) ->
+ exit(Res);
+flush_remaining([SkipNode | Tail ], SkipNode, Res) ->
+ del_debug(SkipNode),
+ flush_remaining(Tail, SkipNode, Res);
+flush_remaining([Node | Tail], SkipNode, Res) ->
+ receive
+ {?MODULE, Node, _} ->
+ del_debug(Node),
+ flush_remaining(Tail, SkipNode, Res);
+ {mnesia_down, Node} ->
+ del_debug(Node),
+ flush_remaining(Tail, SkipNode, {aborted, {node_not_running, Node}})
+ end.
+
+opt_lookup_in_client(lookup_in_client, Oid, Lock) ->
+ {Tab, Key} = Oid,
+ case catch mnesia_lib:db_get(Tab, Key) of
+ {'EXIT', _} ->
+ %% Table has been deleted from this node,
+ %% restart the transaction.
+ #cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere};
+ Val ->
+ Val
+ end;
+opt_lookup_in_client(Val, _Oid, _Lock) ->
+ Val.
+
+return_granted_or_nodes({_, ?ALL} , Nodes) -> Nodes;
+return_granted_or_nodes({?GLOBAL, _}, Nodes) -> Nodes;
+return_granted_or_nodes(_ , _Nodes) -> granted.
+
+%% We store a {Tab, read, From} item in the
+%% locks table on the node where we actually do pick up the object
+%% and we also store an item {lock, Oid, read} in our local store
+%% so that we can release any locks we hold when we commit.
+%% This function not only acquires a read lock, but also reads the object
+
+%% Oid's are always {Tab, Key} tuples
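+%%
+%% Illustration (hypothetical oid): a granted rlock(Tid, Store, {person, 47})
+%% leaves {{locks, person, 47}, read} and {nodes, Node} in the local store
+%% (see rlock_get_reply/4) and returns the value read at Node.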
+rlock(Tid, Store, Oid) ->
+ {Tab, Key} = Oid,
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ mnesia:abort({no_exists, Tab});
+ Node ->
+ case need_lock(Store, Tab, Key, '_') of
+ yes ->
+ R = l_request(Node, {read, Tid, Oid}, Store),
+ rlock_get_reply(Node, Store, Oid, R);
+ no ->
+ if
+ Key == ?ALL ->
+ [Node];
+ Tab == ?GLOBAL ->
+ [Node];
+ true ->
+ dirty_rpc(Node, Tab, Key, read)
+ end
+ end
+ end.
+
+dirty_rpc(nowhere, Tab, Key, _Lock) ->
+ mnesia:abort({no_exists, {Tab, Key}});
+dirty_rpc(Node, _Tab, ?ALL, _Lock) ->
+ [Node];
+dirty_rpc(Node, ?GLOBAL, _Key, _Lock) ->
+ [Node];
+dirty_rpc(Node, Tab, Key, Lock) ->
+ Args = [Tab, Key],
+ case rpc:call(Node, mnesia_lib, db_get, Args) of
+ {badrpc, Reason} ->
+ case val({Tab, where_to_read}) of
+ Node ->
+ ErrorTag = mnesia_lib:dirty_rpc_error_tag(Reason),
+ mnesia:abort({ErrorTag, Args});
+ _NewNode ->
+ %% Table has been deleted from the node,
+ %% restart the transaction.
+ C = #cyclic{op = read, lock = Lock, oid = {Tab, Key}, lucky = nowhere},
+ exit({aborted, C})
+ end;
+ Other ->
+ Other
+ end.
+
+rlock_get_reply(Node, Store, Oid, {granted, V}) ->
+ {Tab, Key} = Oid,
+ ?ets_insert(Store, {{locks, Tab, Key}, read}),
+ ?ets_insert(Store, {nodes, Node}),
+ case opt_lookup_in_client(V, Oid, read) of
+ C when record(C, cyclic) ->
+ mnesia:abort(C);
+ Val ->
+ Val
+ end;
+rlock_get_reply(Node, Store, Oid, granted) ->
+ {Tab, Key} = Oid,
+ ?ets_insert(Store, {{locks, Tab, Key}, read}),
+ ?ets_insert(Store, {nodes, Node}),
+ return_granted_or_nodes(Oid, [Node]);
+rlock_get_reply(Node, Store, Tab, {granted, V, RealKeys}) ->
+ L = fun(K) -> ?ets_insert(Store, {{locks, Tab, K}, read}) end,
+ lists:foreach(L, RealKeys),
+ ?ets_insert(Store, {nodes, Node}),
+ V;
+rlock_get_reply(_Node, _Store, _Oid, {not_granted , Reason}) ->
+ exit({aborted, Reason});
+
+rlock_get_reply(_Node, Store, Oid, {switch, N2, Req}) ->
+ ?ets_insert(Store, {nodes, N2}),
+ {?MODULE, N2} ! Req,
+ rlock_get_reply(N2, Store, Oid, l_req_rec(N2, Store)).
+
+
+rlock_table(Tid, Store, Tab) ->
+ rlock(Tid, Store, {Tab, ?ALL}).
+
+ixrlock(Tid, Store, Tab, IxKey, Pos) ->
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ mnesia:abort({no_exists, Tab});
+ Node ->
+ R = l_request(Node, {ix_read, Tid, Tab, IxKey, Pos}, Store),
+ rlock_get_reply(Node, Store, Tab, R)
+ end.
+
+%% Grabs the locks or exits
+global_lock(Tid, Store, Item, write, Ns) ->
+ Oid = {?GLOBAL, Item},
+ Op = {self(), {write, Tid, Oid}},
+ get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid);
+global_lock(Tid, Store, Item, read, Ns) ->
+ Oid = {?GLOBAL, Item},
+ send_requests(Ns, {read, Tid, Oid}),
+ rec_requests(Ns, Oid, Store),
+ Ns.
+
+send_requests([Node | Nodes], X) ->
+ {?MODULE, Node} ! {self(), X},
+ send_requests(Nodes, X);
+send_requests([], _X) ->
+ ok.
+
+rec_requests([Node | Nodes], Oid, Store) ->
+ Res = l_req_rec(Node, Store),
+ case catch rlock_get_reply(Node, Store, Oid, Res) of
+ {'EXIT', Reason} ->
+ flush_remaining(Nodes, Node, Reason);
+ _ ->
+ rec_requests(Nodes, Oid, Store)
+ end;
+rec_requests([], _Oid, _Store) ->
+ ok.
+
+get_held_locks() ->
+ ?ets_match_object(mnesia_held_locks, '_').
+
+get_lock_queue() ->
+ Q = ?ets_match_object(mnesia_lock_queue, '_'),
+ [{Oid, Op, Pid, Tid, WFT} || {queue, Oid, Tid, Op, Pid, WFT} <- Q].
+
+do_stop() ->
+ exit(shutdown).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, State) ->
+ loop(State).
+
+system_terminate(_Reason, _Parent, _Debug, _State) ->
+ do_stop().
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_log.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_log.erl
new file mode 100644
index 0000000000..79bd8d3812
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_log.erl
@@ -0,0 +1,1019 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_log.erl,v 1.2 2009/07/01 15:45:40 kostis Exp $
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% This module administers three kinds of log files:
+%%
+%% 1 The transaction log
+%% mnesia_tm appends to the log (via mnesia_log) at the
+%% end of each transaction (or dirty write) and
+%% mnesia_dumper reads the log and performs the ops in
+%% the dat files. The dump_log is done at startup and
+%% at intervals controlled by the user.
+%%
+%% 2 The mnesia_down log
+%% mnesia_tm appends to the log (via mnesia_log) when it
+%% realizes that mnesia goes up or down on another node.
+%% mnesia_init reads the log (via mnesia_log) at startup.
+%%
+%% 3 The backup log
+%% mnesia_schema produces one tiny log when the schema is
+%% initially created. mnesia_schema also reads the log
+%% when the user wants tables (possibly incl the schema)
+%% to be restored. mnesia_log appends to the log when the
+%% user wants to produce a real backup.
+%%
+%% The actual access to the backup media is performed via the
+%% mnesia_backup module for both read and write. mnesia_backup
+%% uses the disk_log (*), BUT the user may write their own module
+%% with the same interface as mnesia_backup and configure
+%% Mnesia so that the alternate module performs the actual accesses
+%% to the backup media. This means that the user may put the
+%% backup on media that Mnesia does not know about, possibly on
+%% hosts where Erlang is not running.
+%%
+%% All these logs have, to some extent, a common structure.
+%% They all use the disk_log module (*) for the basic
+%% file structure. The disk_log has a repair feature that
+%% can be used to skip erroneous log records if one comes to
+%% the conclusion that reusing some of the log records is more
+%% important than the risk of obtaining inconsistent
+%% data. If the data becomes inconsistent it is solely up to the
+%% application to make it consistent again. The automatic
+%% repair of the disk_log is very powerful, but use it
+%% with extreme care.
+%%
+%% First in each of Mnesia's log files is a mnesia log header.
+%% It contains a list with a log_header record as its single
+%% element. The structure of the log_header may never be
+%% changed since it may be written to very old backup files.
+%% By holding this record definition stable we remain
+%% able to comprehend backups from timepoint 0. It also
+%% allows us to use the backup format as an interchange
+%% format between Mnesia releases.
+%%
+%% An op-list is a list of tuples with arity 3. Each tuple
+%% has this structure: {Oid, Recs, Op} where Oid is the tuple
+%% {Tab, Key}, Recs is a (possibly empty) list of records and
+%% Op is an atom.
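+%%
+%% For example (hypothetical table), a write of the person record with
+%% key 47 would appear in an op-list as {{person, 47}, Recs, write},
+%% where Recs holds the written record.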
+%%
+%% The log file structure for the transaction log is as follows.
+%%
+%% After the mnesia log section follows an extended record section
+%% containing op-lists. There are several values that Op may
+%% have, such as write, delete, update_counter, delete_object,
+%% and replace. There is no special end of section marker.
+%%
+%% +-----------------+
+%% | mnesia log head |
+%% +-----------------+
+%% | extended record |
+%% | section |
+%% +-----------------+
+%%
+%% The log file structure for the mnesia_down log is as follows.
+%%
+%% After the mnesia log section follows a mnesia_down section
+%% containing lists with a yoyo record as their single element.
+%%
+%% +-----------------+
+%% | mnesia log head |
+%% +-----------------+
+%% | mnesia_down |
+%% | section |
+%% +-----------------+
+%%
+%% The log file structure for the backup log is as follows.
+%%
+%% After the mnesia log section follows a schema section
+%% containing record lists. A record list is a list of tuples
+%% where {schema, Tab} is interpreted as a delete_table(Tab) and
+%% {schema, Tab, CreateList} is interpreted as a create_table.
+%%
+%% The record section also contains record lists. In this section
+%% {Tab, Key} is interpreted as delete({Tab, Key}) and other tuples
+%% as write(Tuple). There is no special end of section marker.
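+%%
+%% For example (hypothetical table): {schema, person} means
+%% delete_table(person), {schema, person, CreateList} means create_table,
+%% {person, 47} means delete({person, 47}) and
+%% {person, 47, "Bill"} means write({person, 47, "Bill"}).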
+%%
+%% +-----------------+
+%% | mnesia log head |
+%% +-----------------+
+%% | schema section |
+%% +-----------------+
+%% | record section |
+%% +-----------------+
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-module(mnesia_log).
+
+-export([
+ append/2,
+ backup/1,
+ backup/2,
+ backup_checkpoint/2,
+ backup_checkpoint/3,
+ backup_log_header/0,
+ backup_master/2,
+ chunk_decision_log/1,
+ chunk_decision_tab/1,
+ chunk_log/1,
+ chunk_log/2,
+ close_decision_log/0,
+ close_decision_tab/0,
+ close_log/1,
+ unsafe_close_log/1,
+ confirm_log_dump/1,
+ confirm_decision_log_dump/0,
+ previous_log_file/0,
+ previous_decision_log_file/0,
+ latest_log_file/0,
+ decision_log_version/0,
+ decision_log_file/0,
+ decision_tab_file/0,
+ decision_tab_version/0,
+ dcl_version/0,
+ dcd_version/0,
+ ets2dcd/1,
+ ets2dcd/2,
+ dcd2ets/1,
+ dcd2ets/2,
+ init/0,
+ init_log_dump/0,
+ log/1,
+ slog/1,
+ log_decision/1,
+ log_files/0,
+ open_decision_log/0,
+ trans_log_header/0,
+ open_decision_tab/0,
+ dcl_log_header/0,
+ dcd_log_header/0,
+ open_log/4,
+ open_log/6,
+ prepare_decision_log_dump/0,
+ prepare_log_dump/1,
+ save_decision_tab/1,
+ purge_all_logs/0,
+ purge_some_logs/0,
+ stop/0,
+ tab_copier/3,
+ version/0,
+ view/0,
+ view/1,
+ write_trans_log_header/0
+ ]).
+
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [val/1, dir/1]).
+-import(mnesia_lib, [exists/1, fatal/2, error/2, dbg_out/2]).
+
+trans_log_header() -> log_header(trans_log, version()).
+backup_log_header() -> log_header(backup_log, "1.2").
+decision_log_header() -> log_header(decision_log, decision_log_version()).
+decision_tab_header() -> log_header(decision_tab, decision_tab_version()).
+dcl_log_header() -> log_header(dcl_log, dcl_version()).
+dcd_log_header() -> log_header(dcd_log, dcd_version()).
+
+log_header(Kind, Version) ->
+ #log_header{log_version=Version,
+ log_kind=Kind,
+ mnesia_version=mnesia:system_info(version),
+ node=node(),
+ now=now()}.
+
+version() -> "4.3".
+
+decision_log_version() -> "3.0".
+
+decision_tab_version() -> "1.0".
+
+dcl_version() -> "1.0".
+dcd_version() -> "1.0".
+
+append(Log, Bin) when binary(Bin) ->
+ disk_log:balog(Log, Bin);
+append(Log, Term) ->
+ disk_log:alog(Log, Term).
+
+%% Synced append
+sappend(Log, Bin) when binary(Bin) ->
+ ok = disk_log:blog(Log, Bin);
+sappend(Log, Term) ->
+ ok = disk_log:log(Log, Term).
+
+%% Write commit records to the latest_log
+log(C) when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [],
+ C#commit.schema_ops == [] ->
+ ignore;
+log(C) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ if
+ record(C, commit) ->
+ C2 = C#commit{ram_copies = [], snmp = []},
+ append(latest_log, C2);
+ true ->
+ %% Either a commit record as binary
+ %% or some decision related info
+ append(latest_log, C)
+ end,
+ mnesia_dumper:incr_log_writes();
+ false ->
+ ignore
+ end.
+
+%% Synced
+
+slog(C) when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [],
+ C#commit.schema_ops == [] ->
+ ignore;
+slog(C) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ if
+ record(C, commit) ->
+ C2 = C#commit{ram_copies = [], snmp = []},
+ sappend(latest_log, C2);
+ true ->
+ %% Either a commit record as binary
+ %% or some decision related info
+ sappend(latest_log, C)
+ end,
+ mnesia_dumper:incr_log_writes();
+ false ->
+ ignore
+ end.
+
+
+%% Stuff related to the file LOG
+
+%% Returns a list of logfiles. The oldest is first.
+log_files() -> [previous_log_file(),
+ latest_log_file(),
+ decision_tab_file()
+ ].
+
+latest_log_file() -> dir(latest_log_name()).
+
+previous_log_file() -> dir("PREVIOUS.LOG").
+
+decision_log_file() -> dir(decision_log_name()).
+
+decision_tab_file() -> dir(decision_tab_name()).
+
+previous_decision_log_file() -> dir("PDECISION.LOG").
+
+latest_log_name() -> "LATEST.LOG".
+
+decision_log_name() -> "DECISION.LOG".
+
+decision_tab_name() -> "DECISION_TAB.LOG".
+
+init() ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Prev = previous_log_file(),
+ verify_no_exists(Prev),
+
+ Latest = latest_log_file(),
+ verify_no_exists(Latest),
+
+ Header = trans_log_header(),
+ open_log(latest_log, Header, Latest);
+ false ->
+ ok
+ end.
+
+verify_no_exists(Fname) ->
+ case exists(Fname) of
+ false ->
+ ok;
+ true ->
+ fatal("Log file exists: ~p~n", [Fname])
+ end.
+
+open_log(Name, Header, Fname) ->
+ Exists = exists(Fname),
+ open_log(Name, Header, Fname, Exists).
+
+open_log(Name, Header, Fname, Exists) ->
+ Repair = mnesia_monitor:get_env(auto_repair),
+ open_log(Name, Header, Fname, Exists, Repair).
+
+open_log(Name, Header, Fname, Exists, Repair) ->
+ case Name == previous_log of
+ true ->
+ open_log(Name, Header, Fname, Exists, Repair, read_only);
+ false ->
+ open_log(Name, Header, Fname, Exists, Repair, read_write)
+ end.
+
+open_log(Name, Header, Fname, Exists, Repair, Mode) ->
+ Args = [{file, Fname}, {name, Name}, {repair, Repair}, {mode, Mode}],
+%% io:format("~p:open_log: ~p ~p~n", [?MODULE, Name, Fname]),
+ case mnesia_monitor:open_log(Args) of
+ {ok, Log} when Exists == true ->
+ Log;
+ {ok, Log} ->
+ write_header(Log, Header),
+ Log;
+ {repaired, Log, _, {badbytes, 0}} when Exists == true ->
+ Log;
+ {repaired, Log, _, {badbytes, 0}} ->
+ write_header(Log, Header),
+ Log;
+ {repaired, Log, _Recover, BadBytes} ->
+ mnesia_lib:important("Data may be missing, log ~p repaired: Lost ~p bytes~n",
+ [Fname, BadBytes]),
+ Log;
+ {error, Reason} when Repair == true ->
+ file:delete(Fname),
+ mnesia_lib:important("Data may be missing, Corrupt logfile deleted: ~p, ~p ~n",
+ [Fname, Reason]),
+ %% Create a new
+ open_log(Name, Header, Fname, false, false, read_write);
+ {error, Reason} ->
+ fatal("Cannot open log file ~p: ~p~n", [Fname, Reason])
+ end.
+
+write_header(Log, Header) ->
+ append(Log, Header).
+
+write_trans_log_header() ->
+ write_header(latest_log, trans_log_header()).
+
+stop() ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ close_log(latest_log);
+ false ->
+ ok
+ end.
+
+close_log(Log) ->
+%% io:format("mnesia_log:close_log ~p~n", [Log]),
+%% io:format("mnesia_log:close_log ~p~n", [Log]),
+ case disk_log:sync(Log) of
+ ok -> ok;
+ {error, {read_only_mode, Log}} ->
+ ok;
+ {error, Reason} ->
+ mnesia_lib:important("Failed syncing ~p to_disk reason ~p ~n",
+ [Log, Reason])
+ end,
+ mnesia_monitor:close_log(Log).
+
+unsafe_close_log(Log) ->
+%% io:format("mnesia_log:close_log ~p~n", [Log]),
+ mnesia_monitor:unsafe_close_log(Log).
+
+
+purge_some_logs() ->
+ mnesia_monitor:unsafe_close_log(latest_log),
+ file:delete(latest_log_file()),
+ file:delete(decision_tab_file()).
+
+purge_all_logs() ->
+ file:delete(previous_log_file()),
+ file:delete(latest_log_file()),
+ file:delete(decision_tab_file()).
+
+%% Prepare a dump by renaming the open logfile if possible.
+%% Returns already_dumped, {needs_dump, NumberOfLogWrites}
+%% or {error, Reason}; see prepare_prev/4 below.
+prepare_log_dump(InitBy) ->
+ Diff = mnesia_dumper:get_log_writes() -
+ mnesia_lib:read_counter(trans_log_writes_prev),
+ if
+ Diff == 0, InitBy /= startup ->
+ already_dumped;
+ true ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Prev = previous_log_file(),
+ prepare_prev(Diff, InitBy, Prev, exists(Prev));
+ false ->
+ already_dumped
+ end
+ end.
+
+prepare_prev(Diff, _, _, true) ->
+ {needs_dump, Diff};
+prepare_prev(Diff, startup, Prev, false) ->
+ Latest = latest_log_file(),
+ case exists(Latest) of
+ true ->
+ case file:rename(Latest, Prev) of
+ ok ->
+ {needs_dump, Diff};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ false ->
+ already_dumped
+ end;
+prepare_prev(Diff, _InitBy, Prev, false) ->
+ Head = trans_log_header(),
+ case mnesia_monitor:reopen_log(latest_log, Prev, Head) of
+ ok ->
+ {needs_dump, Diff};
+ {error, Reason} ->
+ Latest = latest_log_file(),
+ {error, {"Cannot rename log file",
+ [Latest, Prev, Reason]}}
+ end.
+
+%% Init dump and return PrevLogFileDesc or exit.
+init_log_dump() ->
+ Fname = previous_log_file(),
+ open_log(previous_log, trans_log_header(), Fname),
+ start.
+
+
+chunk_log(Cont) ->
+ chunk_log(previous_log, Cont).
+
+chunk_log(_Log, eof) ->
+ eof;
+chunk_log(Log, Cont) ->
+ case catch disk_log:chunk(Log, Cont) of
+ {error, Reason} ->
+ fatal("Possibly truncated ~p file: ~p~n",
+ [Log, Reason]);
+ {C2, Chunk, _BadBytes} ->
+ %% Read_only case, should we warn about the bad log file?
+ %% BUGBUG Should we crash if Repair == false ??
+ %% We got to check this !!
+ mnesia_lib:important("~p repaired, lost ~p bad bytes~n", [Log, _BadBytes]),
+ {C2, Chunk};
+ Other ->
+ Other
+ end.
+
+%% Confirms the dump by closing the prev log and deleting the file
+confirm_log_dump(Updates) ->
+ case mnesia_monitor:close_log(previous_log) of
+ ok ->
+ file:delete(previous_log_file()),
+ mnesia_lib:incr_counter(trans_log_writes_prev, Updates),
+ dumped;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Decision log
+
+open_decision_log() ->
+ Latest = decision_log_file(),
+ open_log(decision_log, decision_log_header(), Latest),
+ start.
+
+prepare_decision_log_dump() ->
+ Prev = previous_decision_log_file(),
+ prepare_decision_log_dump(exists(Prev), Prev).
+
+prepare_decision_log_dump(false, Prev) ->
+ Head = decision_log_header(),
+ case mnesia_monitor:reopen_log(decision_log, Prev, Head) of
+ ok ->
+ prepare_decision_log_dump(true, Prev);
+ {error, Reason} ->
+ fatal("Cannot rename decision log file ~p -> ~p: ~p~n",
+ [decision_log_file(), Prev, Reason])
+ end;
+prepare_decision_log_dump(true, Prev) ->
+ open_log(previous_decision_log, decision_log_header(), Prev),
+ start.
+
+chunk_decision_log(Cont) ->
+ %% dbg_out("chunk log ~p~n", [Cont]),
+ chunk_log(previous_decision_log, Cont).
+
+%% Confirms dump of the decision log
+confirm_decision_log_dump() ->
+ case mnesia_monitor:close_log(previous_decision_log) of
+ ok ->
+ file:delete(previous_decision_log_file());
+ {error, Reason} ->
+ fatal("Cannot confirm decision log dump: ~p~n",
+ [Reason])
+ end.
+
+save_decision_tab(Decisions) ->
+ Log = decision_tab,
+ Tmp = mnesia_lib:dir("DECISION_TAB.TMP"),
+ file:delete(Tmp),
+ open_log(Log, decision_tab_header(), Tmp),
+ append(Log, Decisions),
+ close_log(Log),
+ TabFile = decision_tab_file(),
+ ok = file:rename(Tmp, TabFile).
+
+open_decision_tab() ->
+ TabFile = decision_tab_file(),
+ open_log(decision_tab, decision_tab_header(), TabFile),
+ start.
+
+close_decision_tab() ->
+ close_log(decision_tab).
+
+chunk_decision_tab(Cont) ->
+ %% dbg_out("chunk tab ~p~n", [Cont]),
+ chunk_log(decision_tab, Cont).
+
+close_decision_log() ->
+ close_log(decision_log).
+
+log_decision(Decision) ->
+ append(decision_log, Decision).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Debug functions
+
+view() ->
+ lists:foreach(fun(F) -> view(F) end, log_files()).
+
+view(File) ->
+ mnesia_lib:show("***** ~p ***** ~n", [File]),
+ case exists(File) of
+ false ->
+ nolog;
+ true ->
+ N = view_only,
+ Args = [{file, File}, {name, N}, {mode, read_only}],
+ case disk_log:open(Args) of
+ {ok, N} ->
+ view_file(start, N);
+ {repaired, _, _, _} ->
+ view_file(start, N);
+ {error, Reason} ->
+ error("Cannot open log ~p: ~p~n", [File, Reason])
+ end
+ end.
+
+view_file(C, Log) ->
+ case disk_log:chunk(Log, C) of
+ {error, Reason} ->
+ error("** Possibly truncated FILE ~p~n", [Reason]),
+ error;
+ eof ->
+ disk_log:close(Log),
+ eof;
+ {C2, Terms, _BadBytes} ->
+ dbg_out("Lost ~p bytes in ~p ~n", [_BadBytes, Log]),
+ lists:foreach(fun(X) -> mnesia_lib:show("~p~n", [X]) end,
+ Terms),
+ view_file(C2, Log);
+ {C2, Terms} ->
+ lists:foreach(fun(X) -> mnesia_lib:show("~p~n", [X]) end,
+ Terms),
+ view_file(C2, Log)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup
+
+-record(backup_args, {name, module, opaque, scope, prev_name, tables, cookie}).
+
+backup(Opaque) ->
+ backup(Opaque, []).
+
+backup(Opaque, Mod) when atom(Mod) ->
+ backup(Opaque, [{module, Mod}]);
+backup(Opaque, Args) when list(Args) ->
+ %% Backup all tables with max redundancy
+ CpArgs = [{ram_overrides_dump, false}, {max, val({schema, tables})}],
+ case mnesia_checkpoint:activate(CpArgs) of
+ {ok, Name, _Nodes} ->
+ Res = backup_checkpoint(Name, Opaque, Args),
+ mnesia_checkpoint:deactivate(Name),
+ Res;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+backup_checkpoint(Name, Opaque) ->
+ backup_checkpoint(Name, Opaque, []).
+
+backup_checkpoint(Name, Opaque, Mod) when atom(Mod) ->
+ backup_checkpoint(Name, Opaque, [{module, Mod}]);
+backup_checkpoint(Name, Opaque, Args) when list(Args) ->
+ DefaultMod = mnesia_monitor:get_env(backup_module),
+ B = #backup_args{name = Name,
+ module = DefaultMod,
+ opaque = Opaque,
+ scope = global,
+ tables = all,
+ prev_name = Name},
+ case check_backup_args(Args, B) of
+ {ok, B2} ->
+ %% Decentralized backup
+ %% Incremental
+
+ Self = self(),
+ Pid = spawn_link(?MODULE, backup_master, [Self, B2]),
+ receive
+ {Pid, Self, Res} -> Res
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+check_backup_args([Arg | Tail], B) ->
+ case catch check_backup_arg_type(Arg, B) of
+ {'EXIT', _Reason} ->
+ {error, {badarg, Arg}};
+ B2 ->
+ check_backup_args(Tail, B2)
+ end;
+
+check_backup_args([], B) ->
+ {ok, B}.
+
+check_backup_arg_type(Arg, B) ->
+ case Arg of
+ {scope, global} ->
+ B#backup_args{scope = global};
+ {scope, local} ->
+ B#backup_args{scope = local};
+ {module, Mod} ->
+ Mod2 = mnesia_monitor:do_check_type(backup_module, Mod),
+ B#backup_args{module = Mod2};
+ {incremental, Name} ->
+ B#backup_args{prev_name = Name};
+ {tables, Tabs} when list(Tabs) ->
+ B#backup_args{tables = Tabs}
+ end.
+
+backup_master(ClientPid, B) ->
+ process_flag(trap_exit, true),
+ case catch do_backup_master(B) of
+ {'EXIT', Reason} ->
+ ClientPid ! {self(), ClientPid, {error, {'EXIT', Reason}}};
+ Res ->
+ ClientPid ! {self(), ClientPid, Res}
+ end,
+ unlink(ClientPid),
+ exit(normal).
+
+do_backup_master(B) ->
+ Name = B#backup_args.name,
+ B2 = safe_apply(B, open_write, [B#backup_args.opaque]),
+ B3 = safe_write(B2, [backup_log_header()]),
+ case mnesia_checkpoint:tables_and_cookie(Name) of
+ {ok, AllTabs, Cookie} ->
+ Tabs = select_tables(AllTabs, B3),
+ B4 = B3#backup_args{cookie = Cookie},
+ %% Always put schema first in backup file
+ B5 = backup_schema(B4, Tabs),
+ B6 = lists:foldl(fun backup_tab/2, B5, Tabs -- [schema]),
+ safe_apply(B6, commit_write, [B6#backup_args.opaque]),
+ ok;
+ {error, Reason} ->
+ abort_write(B3, {?MODULE, backup_master}, [B], {error, Reason})
+ end.
+
+select_tables(AllTabs, B) ->
+ Tabs =
+ case B#backup_args.tables of
+ all -> AllTabs;
+ SomeTabs when list(SomeTabs) -> SomeTabs
+ end,
+ case B#backup_args.scope of
+ global ->
+ Tabs;
+ local ->
+ Name = B#backup_args.name,
+ [T || T <- Tabs, mnesia_checkpoint:most_local_node(Name, T) == node()]
+ end.
+
+safe_write(B, []) ->
+ B;
+safe_write(B, Recs) ->
+ safe_apply(B, write, [B#backup_args.opaque, Recs]).
+
+backup_schema(B, Tabs) ->
+ case lists:member(schema, Tabs) of
+ true ->
+ backup_tab(schema, B);
+ false ->
+ Defs = [{schema, T, mnesia_schema:get_create_list(T)} || T <- Tabs],
+ safe_write(B, Defs)
+ end.
+
+safe_apply(B, write, [_, Items]) when Items == [] ->
+ B;
+safe_apply(B, What, Args) ->
+ Abort = fun(R) -> abort_write(B, What, Args, R) end,
+ receive
+ {'EXIT', Pid, R} -> Abort({'EXIT', Pid, R})
+ after 0 ->
+ Mod = B#backup_args.module,
+ case catch apply(Mod, What, Args) of
+ {ok, Opaque} -> B#backup_args{opaque=Opaque};
+ {error, R} -> Abort(R);
+ R -> Abort(R)
+ end
+ end.
+
+abort_write(B, What, Args, Reason) ->
+ Mod = B#backup_args.module,
+ Opaque = B#backup_args.opaque,
+ dbg_out("Failed to perform backup. M=~p:F=~p:A=~p -> ~p~n",
+ [Mod, What, Args, Reason]),
+ case catch apply(Mod, abort_write, [Opaque]) of
+ {ok, _Res} ->
+ throw({error, Reason});
+ Other ->
+ error("Failed to abort backup. ~p:~p~p -> ~p~n",
+ [Mod, abort_write, [Opaque], Other]),
+ throw({error, Reason})
+ end.
+
+backup_tab(Tab, B) ->
+ Name = B#backup_args.name,
+ case mnesia_checkpoint:most_local_node(Name, Tab) of
+ {ok, Node} when Node == node() ->
+ tab_copier(self(), B, Tab);
+ {ok, Node} ->
+ RemoteB = B,
+ Pid = spawn_link(Node, ?MODULE, tab_copier, [self(), RemoteB, Tab]),
+ RecName = val({Tab, record_name}),
+ tab_receiver(Pid, B, Tab, RecName, 0);
+ {error, Reason} ->
+ abort_write(B, {?MODULE, backup_tab}, [Tab, B], {error, Reason})
+ end.
+
+tab_copier(Pid, B, Tab) when record(B, backup_args) ->
+ %% Intentional crash at exit
+ Name = B#backup_args.name,
+ PrevName = B#backup_args.prev_name,
+ {FirstName, FirstSource} = select_source(Tab, Name, PrevName),
+
+ ?eval_debug_fun({?MODULE, tab_copier, pre}, [{name, Name}, {tab, Tab}]),
+ Res = handle_more(Pid, B, Tab, FirstName, FirstSource, Name),
+ ?eval_debug_fun({?MODULE, tab_copier, post}, [{name, Name}, {tab, Tab}]),
+
+ handle_last(Pid, Res).
+
+select_source(Tab, Name, PrevName) ->
+ if
+ Tab == schema ->
+ %% Always full backup of schema
+ {Name, table};
+ Name == PrevName ->
+ %% Full backup
+ {Name, table};
+ true ->
+ %% Wants incremental backup
+ case mnesia_checkpoint:most_local_node(PrevName, Tab) of
+ {ok, Node} when Node == node() ->
+ %% Accept incremental backup
+ {PrevName, retainer};
+ _ ->
+ %% Do a full backup anyway
+ dbg_out("Incremental backup escalated to full backup: ~p~n", [Tab]),
+ {Name, table}
+ end
+ end.
+
+handle_more(Pid, B, Tab, FirstName, FirstSource, Name) ->
+ Acc = {0, B},
+ case {mnesia_checkpoint:really_retain(Name, Tab),
+ mnesia_checkpoint:really_retain(FirstName, Tab)} of
+ {true, true} ->
+ Acc2 = iterate(B, FirstName, Tab, Pid, FirstSource, latest, first, Acc),
+ iterate(B, Name, Tab, Pid, retainer, checkpoint, last, Acc2);
+ {false, false}->
+ %% Put the dumped file in the backup
+	    %% instead of the ram table. This
+	    %% only applies to ram_copies.
+ iterate(B, Name, Tab, Pid, retainer, checkpoint, last, Acc);
+ Bad ->
+ Reason = {"Checkpoints for incremental backup must have same "
+ "setting of ram_overrides_dump",
+ Tab, Name, FirstName, Bad},
+ abort_write(B, {?MODULE, backup_tab}, [Tab, B], {error, Reason})
+ end.
+
+handle_last(Pid, {_Count, B}) when Pid == self() ->
+ B;
+handle_last(Pid, _Acc) ->
+ unlink(Pid),
+ Pid ! {self(), {last, {ok, dummy}}},
+ exit(normal).
+
+iterate(B, Name, Tab, Pid, Source, Age, Pass, Acc) ->
+ Fun =
+ if
+ Pid == self() ->
+ RecName = val({Tab, record_name}),
+ fun(Recs, A) -> copy_records(RecName, Tab, Recs, A) end;
+ true ->
+ fun(Recs, A) -> send_records(Pid, Tab, Recs, Pass, A) end
+ end,
+ case mnesia_checkpoint:iterate(Name, Tab, Fun, Acc, Source, Age) of
+ {ok, Acc2} ->
+ Acc2;
+ {error, Reason} ->
+ R = {error, {"Tab copier iteration failed", Reason}},
+ abort_write(B, {?MODULE, iterate}, [self(), B, Tab], R)
+ end.
+
+copy_records(_RecName, _Tab, [], Acc) ->
+ Acc;
+copy_records(RecName, Tab, Recs, {Count, B}) ->
+ Recs2 = rec_filter(B, Tab, RecName, Recs),
+ B2 = safe_write(B, Recs2),
+ {Count + 1, B2}.
+
+send_records(Pid, Tab, Recs, Pass, {Count, B}) ->
+ receive
+ {Pid, more, Count} ->
+ if
+ Pass == last, Recs == [] ->
+ {Count, B};
+ true ->
+ Next = Count + 1,
+ Pid ! {self(), {more, Next, Recs}},
+ {Next, B}
+ end;
+ Msg ->
+ exit({send_records_unexpected_msg, Tab, Msg})
+ end.
+
+tab_receiver(Pid, B, Tab, RecName, Slot) ->
+ Pid ! {self(), more, Slot},
+ receive
+ {Pid, {more, Next, Recs}} ->
+ Recs2 = rec_filter(B, Tab, RecName, Recs),
+ B2 = safe_write(B, Recs2),
+ tab_receiver(Pid, B2, Tab, RecName, Next);
+
+ {Pid, {last, {ok,_}}} ->
+ B;
+
+ {'EXIT', Pid, {error, R}} ->
+ Reason = {error, {"Tab copier crashed", R}},
+ abort_write(B, {?MODULE, remote_tab_sender}, [self(), B, Tab], Reason);
+ {'EXIT', Pid, R} ->
+ Reason = {error, {"Tab copier crashed", {'EXIT', R}}},
+ abort_write(B, {?MODULE, remote_tab_sender}, [self(), B, Tab], Reason);
+ Msg ->
+ R = {error, {"Tab receiver got unexpected msg", Msg}},
+ abort_write(B, {?MODULE, remote_tab_sender}, [self(), B, Tab], R)
+ end.
+
+rec_filter(B, schema, _RecName, Recs) ->
+ case catch mnesia_bup:refresh_cookie(Recs, B#backup_args.cookie) of
+ Recs2 when list(Recs2) ->
+ Recs2;
+ {error, _Reason} ->
+ %% No schema table cookie
+ Recs
+ end;
+rec_filter(_B, Tab, Tab, Recs) ->
+ Recs;
+rec_filter(_B, Tab, _RecName, Recs) ->
+ [setelement(1, Rec, Tab) || Rec <- Recs].
+
+ets2dcd(Tab) ->
+ ets2dcd(Tab, dcd).
+
+ets2dcd(Tab, Ftype) ->
+ Fname =
+ case Ftype of
+ dcd -> mnesia_lib:tab2dcd(Tab);
+ dmp -> mnesia_lib:tab2dmp(Tab)
+ end,
+ TmpF = mnesia_lib:tab2tmp(Tab),
+ file:delete(TmpF),
+ Log = open_log({Tab, ets2dcd}, dcd_log_header(), TmpF, false),
+ mnesia_lib:db_fixtable(ram_copies, Tab, true),
+ ok = ets2dcd(mnesia_lib:db_init_chunk(ram_copies, Tab, 1000), Tab, Log),
+ mnesia_lib:db_fixtable(ram_copies, Tab, false),
+ close_log(Log),
+ ok = file:rename(TmpF, Fname),
+ %% Remove old log data which is now in the new dcd.
+ %% No one else should be accessing this file!
+ file:delete(mnesia_lib:tab2dcl(Tab)),
+ ok.
+
+ets2dcd('$end_of_table', _Tab, _Log) ->
+ ok;
+ets2dcd({Recs, Cont}, Tab, Log) ->
+ ok = disk_log:alog_terms(Log, Recs),
+ ets2dcd(mnesia_lib:db_chunk(ram_copies, Cont), Tab, Log).
+
+dcd2ets(Tab) ->
+ dcd2ets(Tab, mnesia_monitor:get_env(auto_repair)).
+
+dcd2ets(Tab, Rep) ->
+ Dcd = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dcd) of
+ true ->
+ Log = open_log({Tab, dcd2ets}, dcd_log_header(), Dcd,
+ true, Rep, read_only),
+ Data = chunk_log(Log, start),
+ ok = insert_dcdchunk(Data, Log, Tab),
+ close_log(Log),
+ load_dcl(Tab, Rep);
+ false -> %% Handle old dets files, and conversion from disc_only to disc.
+ Fname = mnesia_lib:tab2dat(Tab),
+ Type = val({Tab, setorbag}),
+ case mnesia_lib:dets_to_ets(Tab, Tab, Fname, Type, Rep, yes) of
+ loaded ->
+ ets2dcd(Tab),
+ file:delete(Fname),
+ 0;
+ {error, Error} ->
+ erlang:error({"Failed to load table from disc", [Tab, Error]})
+ end
+ end.
+
+insert_dcdchunk({Cont, [LogH | Rest]}, Log, Tab)
+ when record(LogH, log_header),
+ LogH#log_header.log_kind == dcd_log,
+ LogH#log_header.log_version >= "1.0" ->
+ insert_dcdchunk({Cont, Rest}, Log, Tab);
+
+insert_dcdchunk({Cont, Recs}, Log, Tab) ->
+ true = ets:insert(Tab, Recs),
+ insert_dcdchunk(chunk_log(Log, Cont), Log, Tab);
+insert_dcdchunk(eof, _Log, _Tab) ->
+ ok.
+
+load_dcl(Tab, Rep) ->
+ FName = mnesia_lib:tab2dcl(Tab),
+ case mnesia_lib:exists(FName) of
+ true ->
+ Name = {load_dcl,Tab},
+ open_log(Name,
+ dcl_log_header(),
+ FName,
+ true,
+ Rep,
+ read_only),
+ FirstChunk = chunk_log(Name, start),
+ N = insert_logchunk(FirstChunk, Name, 0),
+ close_log(Name),
+ N;
+ false ->
+ 0
+ end.
+
+insert_logchunk({C2, Recs}, Tab, C) ->
+ N = add_recs(Recs, C),
+ insert_logchunk(chunk_log(Tab, C2), Tab, C+N);
+insert_logchunk(eof, _Tab, C) ->
+ C.
+
+add_recs([{{Tab, _Key}, Val, write} | Rest], N) ->
+ true = ets:insert(Tab, Val),
+ add_recs(Rest, N+1);
+add_recs([{{Tab, Key}, _Val, delete} | Rest], N) ->
+ true = ets:delete(Tab, Key),
+ add_recs(Rest, N+1);
+add_recs([{{Tab, _Key}, Val, delete_object} | Rest], N) ->
+ true = ets:match_delete(Tab, Val),
+ add_recs(Rest, N+1);
+add_recs([{{Tab, Key}, Val, update_counter} | Rest], N) ->
+ {RecName, Incr} = Val,
+ case catch ets:update_counter(Tab, Key, Incr) of
+ CounterVal when integer(CounterVal) ->
+ ok;
+ _ ->
+ Zero = {RecName, Key, 0},
+ true = ets:insert(Tab, Zero)
+ end,
+ add_recs(Rest, N+1);
+add_recs([LogH|Rest], N)
+ when record(LogH, log_header),
+ LogH#log_header.log_kind == dcl_log,
+ LogH#log_header.log_version >= "1.0" ->
+ add_recs(Rest, N);
+add_recs([{{Tab, _Key}, _Val, clear_table} | Rest], N) ->
+ true = ets:match_delete(Tab, '_'),
+ add_recs(Rest, N+ets:info(Tab, size));
+add_recs([], N) ->
+ N.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_monitor.erl
new file mode 100644
index 0000000000..554f020ffb
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_monitor.erl
@@ -0,0 +1,776 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_monitor.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_monitor).
+
+-behaviour(gen_server).
+
+%% Public exports
+-export([
+ close_dets/1,
+ close_log/1,
+ detect_inconcistency/2,
+ get_env/1,
+ init/0,
+ mktab/2,
+ unsafe_mktab/2,
+ mnesia_down/2,
+ needs_protocol_conversion/1,
+ negotiate_protocol/1,
+ disconnect/1,
+ open_dets/2,
+ unsafe_open_dets/2,
+ open_log/1,
+ patch_env/2,
+ protocol_version/0,
+ reopen_log/3,
+ set_env/2,
+ start/0,
+ start_proc/4,
+ terminate_proc/3,
+ unsafe_close_dets/1,
+ unsafe_close_log/1,
+ use_dir/0,
+ do_check_type/2
+ ]).
+
+%% gen_server callbacks
+-export([
+ init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
+
+%% Internal exports
+-export([
+ call/1,
+ cast/1,
+ detect_partitioned_network/2,
+ has_remote_mnesia_down/1
+ ]).
+
+-import(mnesia_lib, [dbg_out/2, verbose/2, error/2, fatal/2, set/2]).
+
+-include("mnesia.hrl").
+
+-record(state, {supervisor, pending_negotiators = [],
+ going_down = [], tm_started = false, early_connects = []}).
+
+-define(current_protocol_version, {7,6}).
+
+-define(previous_protocol_version, {7,5}).
+
+start() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE,
+ [self()], [{timeout, infinity}
+ %% ,{debug, [trace]}
+ ]).
+
+init() ->
+ call(init).
+
+mnesia_down(From, Node) ->
+ cast({mnesia_down, From, Node}).
+
+mktab(Tab, Args) ->
+ unsafe_call({mktab, Tab, Args}).
+unsafe_mktab(Tab, Args) ->
+ unsafe_call({unsafe_mktab, Tab, Args}).
+
+open_dets(Tab, Args) ->
+ unsafe_call({open_dets, Tab, Args}).
+unsafe_open_dets(Tab, Args) ->
+ unsafe_call({unsafe_open_dets, Tab, Args}).
+
+close_dets(Tab) ->
+ unsafe_call({close_dets, Tab}).
+
+unsafe_close_dets(Name) ->
+ unsafe_call({unsafe_close_dets, Name}).
+
+open_log(Args) ->
+ unsafe_call({open_log, Args}).
+
+reopen_log(Name, Fname, Head) ->
+ unsafe_call({reopen_log, Name, Fname, Head}).
+
+close_log(Name) ->
+ unsafe_call({close_log, Name}).
+
+unsafe_close_log(Name) ->
+ unsafe_call({unsafe_close_log, Name}).
+
+
+disconnect(Node) ->
+ cast({disconnect, Node}).
+
+%% Returns GoodNodes
+%% Creates a link to each compatible monitor and sets
+%% protocol_version to the agreed version upon success
+
+negotiate_protocol(Nodes) ->
+ Version = mnesia:system_info(version),
+ Protocols = acceptable_protocol_versions(),
+ MonitorPid = whereis(?MODULE),
+ Msg = {negotiate_protocol, MonitorPid, Version, Protocols},
+ {Replies, _BadNodes} = multicall(Nodes, Msg),
+ check_protocol(Replies, Protocols).
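A minimal caller sketch (illustrative only; compatible_nodes/1 is a hypothetical helper, not part of the committed file). The return value is simply the list of nodes whose monitor accepted one of our protocol versions, and mnesia_recover:connect_nodes/1 later in this patch consumes it in the same way:

    %% hypothetical helper, shown only to illustrate the call
    compatible_nodes(Candidates) ->
        mnesia_monitor:negotiate_protocol(Candidates -- [node()]).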
+
+check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) ->
+ case lists:member(Protocol, Protocols) of
+ true ->
+ case Protocol == protocol_version() of
+ true ->
+ set({protocol, Node}, {Protocol, false});
+ false ->
+ set({protocol, Node}, {Protocol, true})
+ end,
+ [node(Mon) | check_protocol(Tail, Protocols)];
+ false ->
+	    unlink(Mon), % Get rid of unnecessary link
+ check_protocol(Tail, Protocols)
+ end;
+check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
+ verbose("Failed to connect with ~p. ~p protocols rejected. "
+ "expected version = ~p, expected protocol = ~p~n",
+ [Node, Protocols, Version, Protocol]),
+ check_protocol(Tail, Protocols);
+check_protocol([{error, _Reason} | Tail], Protocols) ->
+ check_protocol(Tail, Protocols);
+check_protocol([{badrpc, _Reason} | Tail], Protocols) ->
+ check_protocol(Tail, Protocols);
+check_protocol([], [Protocol | _Protocols]) ->
+ set(protocol_version, Protocol),
+ [];
+check_protocol([], []) ->
+ set(protocol_version, protocol_version()),
+ [].
+
+protocol_version() ->
+ case ?catch_val(protocol_version) of
+ {'EXIT', _} -> ?current_protocol_version;
+ Version -> Version
+ end.
+
+%% A sorted list of acceptable protocols; the
+%% preferred protocols come first in the list
+acceptable_protocol_versions() ->
+ [protocol_version(), ?previous_protocol_version].
+
+needs_protocol_conversion(Node) ->
+ case {?catch_val({protocol, Node}), protocol_version()} of
+ {{'EXIT', _}, _} ->
+ false;
+ {{_, Bool}, ?current_protocol_version} ->
+ Bool;
+ {{_, Bool}, _} ->
+ not Bool
+ end.
+
+cast(Msg) ->
+ case whereis(?MODULE) of
+ undefined -> ignore;
+ Pid -> gen_server:cast(Pid, Msg)
+ end.
+
+unsafe_call(Msg) ->
+ case whereis(?MODULE) of
+ undefined -> {error, {node_not_running, node()}};
+ Pid -> gen_server:call(Pid, Msg, infinity)
+ end.
+
+call(Msg) ->
+ case whereis(?MODULE) of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ link(Pid),
+ Res = gen_server:call(Pid, Msg, infinity),
+ unlink(Pid),
+
+	    %% We get an exit signal if the server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+multicall(Nodes, Msg) ->
+ rpc:multicall(Nodes, ?MODULE, call, [Msg]).
+
+start_proc(Who, Mod, Fun, Args) ->
+ Args2 = [Who, Mod, Fun, Args],
+ proc_lib:start_link(mnesia_sp, init_proc, Args2, infinity).
+
+terminate_proc(Who, R, State) when R /= shutdown, R /= killed ->
+ fatal("~p crashed: ~p state: ~p~n", [Who, R, State]);
+
+terminate_proc(Who, Reason, _State) ->
+ mnesia_lib:verbose("~p terminated: ~p~n", [Who, Reason]),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Callback functions from gen_server
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ ?ets_new_table(mnesia_gvar, [set, public, named_table]),
+ set(subscribers, []),
+ mnesia_lib:verbose("~p starting: ~p~n", [?MODULE, self()]),
+ Version = mnesia:system_info(version),
+ set(version, Version),
+ dbg_out("Version: ~p~n", [Version]),
+
+ case catch process_config_args(env()) of
+ ok ->
+ mnesia_lib:set({'$$$_report', current_pos}, 0),
+ Level = mnesia_lib:val(debug),
+ mnesia_lib:verbose("Mnesia debug level set to ~p\n", [Level]),
+ set(mnesia_status, starting), %% set start status
+ set({current, db_nodes}, [node()]),
+ set(use_dir, use_dir()),
+ mnesia_lib:create_counter(trans_aborts),
+ mnesia_lib:create_counter(trans_commits),
+ mnesia_lib:create_counter(trans_log_writes),
+ Left = get_env(dump_log_write_threshold),
+ mnesia_lib:set_counter(trans_log_writes_left, Left),
+ mnesia_lib:create_counter(trans_log_writes_prev),
+ mnesia_lib:create_counter(trans_restarts),
+ mnesia_lib:create_counter(trans_failures),
+ ?ets_new_table(mnesia_held_locks, [bag, public, named_table]),
+ ?ets_new_table(mnesia_tid_locks, [bag, public, named_table]),
+ ?ets_new_table(mnesia_sticky_locks, [set, public, named_table]),
+ ?ets_new_table(mnesia_lock_queue,
+ [bag, public, named_table, {keypos, 2}]),
+ ?ets_new_table(mnesia_lock_counter, [set, public, named_table]),
+ set(checkpoints, []),
+ set(pending_checkpoints, []),
+ set(pending_checkpoint_pids, []),
+
+ {ok, #state{supervisor = Parent}};
+ {'EXIT', Reason} ->
+ mnesia_lib:report_fatal("Bad configuration: ~p~n", [Reason]),
+ {stop, {bad_config, Reason}}
+ end.
+
+use_dir() ->
+ case ?catch_val(use_dir) of
+ {'EXIT', _} ->
+ case get_env(schema_location) of
+ disc -> true;
+ opt_disc -> non_empty_dir();
+ ram -> false
+ end;
+ Bool ->
+ Bool
+ end.
+
+%% Returns true if the Mnesia directory contains
+%% important files
+non_empty_dir() ->
+ mnesia_lib:exists(mnesia_bup:fallback_bup()) or
+ mnesia_lib:exists(mnesia_lib:tab2dmp(schema)) or
+ mnesia_lib:exists(mnesia_lib:tab2dat(schema)).
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_call({mktab, Tab, Args}, _From, State) ->
+ case catch ?ets_new_table(Tab, Args) of
+ {'EXIT', ExitReason} ->
+ Msg = "Cannot create ets table",
+ Reason = {system_limit, Msg, Tab, Args, ExitReason},
+ fatal("~p~n", [Reason]),
+ {noreply, State};
+ Reply ->
+ {reply, Reply, State}
+ end;
+
+handle_call({unsafe_mktab, Tab, Args}, _From, State) ->
+ case catch ?ets_new_table(Tab, Args) of
+ {'EXIT', ExitReason} ->
+ {reply, {error, ExitReason}, State};
+ Reply ->
+ {reply, Reply, State}
+ end;
+
+
+handle_call({open_dets, Tab, Args}, _From, State) ->
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, Tab} ->
+ {reply, {ok, Tab}, State};
+
+ {error, Reason} ->
+ Msg = "Cannot open dets table",
+ Error = {error, {Msg, Tab, Args, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({unsafe_open_dets, Tab, Args}, _From, State) ->
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, Tab} ->
+ {reply, {ok, Tab}, State};
+ {error, Reason} ->
+ {reply, {error,Reason}, State}
+ end;
+
+handle_call({close_dets, Tab}, _From, State) ->
+ case mnesia_lib:dets_sync_close(Tab) of
+ ok ->
+ {reply, ok, State};
+ {error, Reason} ->
+ Msg = "Cannot close dets table",
+ Error = {error, {Msg, Tab, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({unsafe_close_dets, Tab}, _From, State) ->
+ mnesia_lib:dets_sync_close(Tab),
+ {reply, ok, State};
+
+handle_call({open_log, Args}, _From, State) ->
+ Res = disk_log:open([{notify, true}|Args]),
+ {reply, Res, State};
+
+handle_call({reopen_log, Name, Fname, Head}, _From, State) ->
+ case disk_log:reopen(Name, Fname, Head) of
+ ok ->
+ {reply, ok, State};
+
+ {error, Reason} ->
+ Msg = "Cannot rename disk_log file",
+ Error = {error, {Msg, Name, Fname, Head, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({close_log, Name}, _From, State) ->
+ case disk_log:close(Name) of
+ ok ->
+ {reply, ok, State};
+
+ {error, Reason} ->
+ Msg = "Cannot close disk_log file",
+ Error = {error, {Msg, Name, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({unsafe_close_log, Name}, _From, State) ->
+ disk_log:close(Name),
+ {reply, ok, State};
+
+handle_call({negotiate_protocol, Mon, _Version, _Protocols}, _From, State)
+ when State#state.tm_started == false ->
+ State2 = State#state{early_connects = [node(Mon) | State#state.early_connects]},
+ {reply, {node(), {reject, self(), uninitialized, uninitialized}}, State2};
+
+handle_call({negotiate_protocol, Mon, Version, Protocols}, From, State)
+ when node(Mon) /= node() ->
+ Protocol = protocol_version(),
+ MyVersion = mnesia:system_info(version),
+ case lists:member(Protocol, Protocols) of
+ true ->
+ accept_protocol(Mon, MyVersion, Protocol, From, State);
+ false ->
+ %% in this release we should be able to handle the previous
+ %% protocol
+ case hd(Protocols) of
+ ?previous_protocol_version ->
+ accept_protocol(Mon, MyVersion, ?previous_protocol_version, From, State);
+ _ ->
+ verbose("Connection with ~p rejected. "
+ "version = ~p, protocols = ~p, "
+ "expected version = ~p, expected protocol = ~p~n",
+ [node(Mon), Version, Protocols, MyVersion, Protocol]),
+ {reply, {node(), {reject, self(), MyVersion, Protocol}}, State}
+ end
+ end;
+
+handle_call(init, _From, State) ->
+ net_kernel:monitor_nodes(true),
+ EarlyNodes = State#state.early_connects,
+ State2 = State#state{tm_started = true},
+ {reply, EarlyNodes, State2};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+accept_protocol(Mon, Version, Protocol, From, State) ->
+ Reply = {node(), {accept, self(), Version, Protocol}},
+ Node = node(Mon),
+ Pending0 = State#state.pending_negotiators,
+ Pending = lists:keydelete(Node, 1, Pending0),
+ case lists:member(Node, State#state.going_down) of
+ true ->
+ %% Wait for the mnesia_down to be processed,
+ %% before we reply
+ P = Pending ++ [{Node, Mon, From, Reply}],
+ {noreply, State#state{pending_negotiators = P}};
+ false ->
+	    %% No need to wait
+ link(Mon), %% link to remote Monitor
+ case Protocol == protocol_version() of
+ true ->
+ set({protocol, Node}, {Protocol, false});
+ false ->
+ set({protocol, Node}, {Protocol, true})
+ end,
+ {reply, Reply, State#state{pending_negotiators = Pending}}
+ end.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_cast({mnesia_down, mnesia_controller, Node}, State) ->
+ mnesia_tm:mnesia_down(Node),
+ {noreply, State};
+
+handle_cast({mnesia_down, mnesia_tm, {Node, Pending}}, State) ->
+ mnesia_locker:mnesia_down(Node, Pending),
+ {noreply, State};
+
+handle_cast({mnesia_down, mnesia_locker, Node}, State) ->
+ Down = {mnesia_down, Node},
+ mnesia_lib:report_system_event(Down),
+ GoingDown = lists:delete(Node, State#state.going_down),
+ State2 = State#state{going_down = GoingDown},
+ Pending = State#state.pending_negotiators,
+ case lists:keysearch(Node, 1, Pending) of
+ {value, {Node, Mon, ReplyTo, Reply}} ->
+ %% Late reply to remote monitor
+ link(Mon), %% link to remote Monitor
+ gen_server:reply(ReplyTo, Reply),
+ P2 = lists:keydelete(Node, 1,Pending),
+ State3 = State2#state{pending_negotiators = P2},
+ {noreply, State3};
+ false ->
+ %% No pending remote monitors
+ {noreply, State2}
+ end;
+
+handle_cast({disconnect, Node}, State) ->
+ case rpc:call(Node, erlang, whereis, [?MODULE]) of
+ {badrpc, _} ->
+ ignore;
+ RemoteMon when pid(RemoteMon) ->
+ unlink(RemoteMon)
+ end,
+ {noreply, State};
+
+handle_cast({inconsistent_database, Context, Node}, State) ->
+ Msg = {inconsistent_database, Context, Node},
+ mnesia_lib:report_system_event(Msg),
+ {noreply, State};
+
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
+ dbg_out("~p was ~p by supervisor~n",[?MODULE, R]),
+ {stop, R, State};
+
+handle_info({'EXIT', Pid, fatal}, State) when node(Pid) == node() ->
+ dbg_out("~p got FATAL ERROR from: ~p~n",[?MODULE, Pid]),
+ exit(State#state.supervisor, shutdown),
+ {noreply, State};
+
+handle_info({'EXIT', Pid, Reason}, State) ->
+ Node = node(Pid),
+ if
+ Node /= node() ->
+	    %% Remotely linked process died, assume that it was a mnesia_monitor
+ mnesia_recover:mnesia_down(Node),
+ mnesia_controller:mnesia_down(Node),
+ {noreply, State#state{going_down = [Node | State#state.going_down]}};
+ true ->
+	    %% We have probably got an exit signal from
+ %% disk_log or dets
+ Hint = "Hint: check that the disk still is writable",
+ Msg = {'EXIT', Pid, Reason},
+ fatal("~p got unexpected info: ~p; ~p~n",
+ [?MODULE, Msg, Hint])
+ end;
+
+handle_info({nodeup, Node}, State) ->
+ %% Ok, we are connected to yet another Erlang node
+ %% Let's check if Mnesia is running there in order
+ %% to detect if the network has been partitioned
+ %% due to communication failure.
+
+ HasDown = mnesia_recover:has_mnesia_down(Node),
+ ImRunning = mnesia_lib:is_running(),
+
+ if
+	%% If I'm not running, the test will be made later.
+ HasDown == true, ImRunning == yes ->
+ spawn_link(?MODULE, detect_partitioned_network, [self(), Node]);
+ true ->
+ ignore
+ end,
+ {noreply, State};
+
+handle_info({nodedown, _Node}, State) ->
+    %% Ignore, we only care about nodeup messages
+ {noreply, State};
+
+handle_info({disk_log, _Node, Log, Info}, State) ->
+ case Info of
+ {truncated, _No} ->
+ ok;
+ _ ->
+ mnesia_lib:important("Warning Log file ~p error reason ~s~n",
+ [Log, disk_log:format_error(Info)])
+ end,
+ {noreply, State};
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info (~p): ~p~n", [?MODULE, State, Msg]).
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+terminate(Reason, State) ->
+ terminate_proc(?MODULE, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+process_config_args([]) ->
+ ok;
+process_config_args([C|T]) ->
+ V = get_env(C),
+ dbg_out("Env ~p: ~p~n", [C, V]),
+ mnesia_lib:set(C, V),
+ process_config_args(T).
+
+set_env(E,Val) ->
+ mnesia_lib:set(E, check_type(E,Val)),
+ ok.
+
+get_env(E) ->
+ case ?catch_val(E) of
+ {'EXIT', _} ->
+ case application:get_env(mnesia, E) of
+ {ok, Val} ->
+ check_type(E, Val);
+ undefined ->
+ check_type(E, default_env(E))
+ end;
+ Val ->
+ Val
+ end.
+
+env() ->
+ [
+ access_module,
+ auto_repair,
+ backup_module,
+ debug,
+ dir,
+ dump_log_load_regulation,
+ dump_log_time_threshold,
+ dump_log_update_in_place,
+ dump_log_write_threshold,
+ embedded_mnemosyne,
+ event_module,
+ extra_db_nodes,
+ ignore_fallback_at_startup,
+ fallback_error_function,
+ max_wait_for_decision,
+ schema_location,
+ core_dir
+ ].
+
+default_env(access_module) ->
+ mnesia;
+default_env(auto_repair) ->
+ true;
+default_env(backup_module) ->
+ mnesia_backup;
+default_env(debug) ->
+ none;
+default_env(dir) ->
+ Name = lists:concat(["Mnesia.", node()]),
+ filename:absname(Name);
+default_env(dump_log_load_regulation) ->
+ false;
+default_env(dump_log_time_threshold) ->
+ timer:minutes(3);
+default_env(dump_log_update_in_place) ->
+ true;
+default_env(dump_log_write_threshold) ->
+ 1000;
+default_env(embedded_mnemosyne) ->
+ false;
+default_env(event_module) ->
+ mnesia_event;
+default_env(extra_db_nodes) ->
+ [];
+default_env(ignore_fallback_at_startup) ->
+ false;
+default_env(fallback_error_function) ->
+ {mnesia, lkill};
+default_env(max_wait_for_decision) ->
+ infinity;
+default_env(schema_location) ->
+ opt_disc;
+default_env(core_dir) ->
+ false.
+
+check_type(Env, Val) ->
+ case catch do_check_type(Env, Val) of
+ {'EXIT', _Reason} ->
+ exit({bad_config, Env, Val});
+ NewVal ->
+ NewVal
+ end.
+
+do_check_type(access_module, A) when atom(A) -> A;
+do_check_type(auto_repair, B) -> bool(B);
+do_check_type(backup_module, B) when atom(B) -> B;
+do_check_type(debug, debug) -> debug;
+do_check_type(debug, false) -> none;
+do_check_type(debug, none) -> none;
+do_check_type(debug, trace) -> trace;
+do_check_type(debug, true) -> debug;
+do_check_type(debug, verbose) -> verbose;
+do_check_type(dir, V) -> filename:absname(V);
+do_check_type(dump_log_load_regulation, B) -> bool(B);
+do_check_type(dump_log_time_threshold, I) when integer(I), I > 0 -> I;
+do_check_type(dump_log_update_in_place, B) -> bool(B);
+do_check_type(dump_log_write_threshold, I) when integer(I), I > 0 -> I;
+do_check_type(event_module, A) when atom(A) -> A;
+do_check_type(ignore_fallback_at_startup, B) -> bool(B);
+do_check_type(fallback_error_function, {Mod, Func})
+ when atom(Mod), atom(Func) -> {Mod, Func};
+do_check_type(embedded_mnemosyne, B) -> bool(B);
+do_check_type(extra_db_nodes, L) when list(L) ->
+ Fun = fun(N) when N == node() -> false;
+ (A) when atom(A) -> true
+ end,
+ lists:filter(Fun, L);
+do_check_type(max_wait_for_decision, infinity) -> infinity;
+do_check_type(max_wait_for_decision, I) when integer(I), I > 0 -> I;
+do_check_type(schema_location, M) -> media(M);
+do_check_type(core_dir, "false") -> false;
+do_check_type(core_dir, false) -> false;
+do_check_type(core_dir, Dir) when list(Dir) -> Dir.
+
+
+bool(true) -> true;
+bool(false) -> false.
+
+media(disc) -> disc;
+media(opt_disc) -> opt_disc;
+media(ram) -> ram.
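Worked examples of the validation chain above (illustrative, not part of the committed file): an acceptable value is returned, possibly normalized, while any other value makes every do_check_type/2 clause fail its guard, which check_type/2 turns into a bad_config exit.

    %% do_check_type(auto_repair, true)         -> true
    %% do_check_type(debug, false)              -> none
    %% do_check_type(schema_location, ram)      -> ram
    %% check_type(dump_log_write_threshold, -1)
    %%   -> exit({bad_config, dump_log_write_threshold, -1})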
+
+patch_env(Env, Val) ->
+ case catch do_check_type(Env, Val) of
+ {'EXIT', _Reason} ->
+ {error, {bad_type, Env, Val}};
+ NewVal ->
+ application_controller:set_env(mnesia, Env, NewVal),
+ NewVal
+ end.
+
+detect_partitioned_network(Mon, Node) ->
+ GoodNodes = negotiate_protocol([Node]),
+ detect_inconcistency(GoodNodes, running_partitioned_network),
+ unlink(Mon),
+ exit(normal).
+
+detect_inconcistency([], _Context) ->
+ ok;
+detect_inconcistency(Nodes, Context) ->
+ Downs = [N || N <- Nodes, mnesia_recover:has_mnesia_down(N)],
+ {Replies, _BadNodes} =
+ rpc:multicall(Downs, ?MODULE, has_remote_mnesia_down, [node()]),
+ report_inconsistency(Replies, Context, ok).
+
+has_remote_mnesia_down(Node) ->
+ HasDown = mnesia_recover:has_mnesia_down(Node),
+ Master = mnesia_recover:get_master_nodes(schema),
+ if
+ HasDown == true, Master == [] ->
+ {true, node()};
+ true ->
+ {false, node()}
+ end.
+
+report_inconsistency([{true, Node} | Replies], Context, _Status) ->
+ %% Oops, Mnesia is already running on the
+ %% other node AND we both regard each
+ %% other as down. The database is
+    %% potentially inconsistent and we have to
+    %% tell the applications about it, so
+ %% they may perform some clever recovery
+ %% action.
+ Msg = {inconsistent_database, Context, Node},
+ mnesia_lib:report_system_event(Msg),
+ report_inconsistency(Replies, Context, inconsistent_database);
+report_inconsistency([{false, _Node} | Replies], Context, Status) ->
+ report_inconsistency(Replies, Context, Status);
+report_inconsistency([{badrpc, _Reason} | Replies], Context, Status) ->
+ report_inconsistency(Replies, Context, Status);
+report_inconsistency([], _Context, Status) ->
+ Status.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_recover.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_recover.erl
new file mode 100644
index 0000000000..b3e8f1c386
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_recover.erl
@@ -0,0 +1,1175 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_recover.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_recover).
+
+-behaviour(gen_server).
+
+-export([
+ allow_garb/0,
+ call/1,
+ connect_nodes/1,
+ disconnect/1,
+ dump_decision_tab/0,
+ get_master_node_info/0,
+ get_master_node_tables/0,
+ get_master_nodes/1,
+ get_mnesia_downs/0,
+ has_mnesia_down/1,
+ incr_trans_tid_serial/0,
+ init/0,
+ log_decision/1,
+ log_master_nodes/3,
+ log_mnesia_down/1,
+ log_mnesia_up/1,
+ mnesia_down/1,
+ note_decision/2,
+ note_log_decision/2,
+ outcome/2,
+ start/0,
+ start_garb/0,
+ still_pending/1,
+ sync_trans_tid_serial/1,
+ wait_for_decision/2,
+ what_happened/3
+ ]).
+
+%% gen_server callbacks
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
+
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [set/2, verbose/2, error/2, fatal/2]).
+
+-record(state, {supervisor,
+ unclear_pid,
+ unclear_decision,
+ unclear_waitfor,
+ tm_queue_len = 0,
+ initiated = false,
+ early_msgs = []
+ }).
+
+%%-define(DBG(F, A), mnesia:report_event(list_to_atom(lists:flatten(io_lib:format(F, A))))).
+%%-define(DBG(F, A), io:format("DBG: " ++ F, A)).
+
+-record(transient_decision, {tid, outcome}).
+
+start() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [self()],
+ [{timeout, infinity}
+ %%, {debug, [trace]}
+ ]).
+
+init() ->
+ call(init).
+
+start_garb() ->
+ Pid = whereis(mnesia_recover),
+ {ok, _} = timer:send_interval(timer:minutes(2), Pid, garb_decisions),
+ {ok, _} = timer:send_interval(timer:seconds(10), Pid, check_overload).
+
+allow_garb() ->
+ cast(allow_garb).
+
+
+%% The transaction log has either been switched (latest -> previous) or
+%% there is nothing to be dumped. This means that the previous
+%% transaction log may only contain commit records which refer to
+%% transactions noted in the last two of the 'Prev' tables. All other
+%% tables may now be garbed by 'garb_decisions' (after 2 minutes).
+%% Max 10 tables are kept.
+do_allow_garb() ->
+ %% The order of the following stuff is important!
+ Curr = val(latest_transient_decision),
+ Old = val(previous_transient_decisions),
+ Next = create_transient_decision(),
+ {Prev, ReallyOld} = sublist([Curr | Old], 10, []),
+ [?ets_delete_table(Tab) || Tab <- ReallyOld],
+ set(previous_transient_decisions, Prev),
+ set(latest_transient_decision, Next).
+
+sublist([H|R], N, Acc) when N > 0 ->
+ sublist(R, N-1, [H| Acc]);
+sublist(List, _N, Acc) ->
+ {lists:reverse(Acc), List}.
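A worked reduction of sublist/3 (illustrative, not part of the committed file): it splits a list into its first N elements and the remainder.

    %% sublist([a, b, c], 2, [])
    %%   -> sublist([b, c], 1, [a])
    %%   -> sublist([c], 0, [b, a])
    %%   -> {lists:reverse([b, a]), [c]}
    %%   =  {[a, b], [c]}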
+
+do_garb_decisions() ->
+ case val(previous_transient_decisions) of
+ [First, Second | Rest] ->
+ set(previous_transient_decisions, [First, Second]),
+ [?ets_delete_table(Tab) || Tab <- Rest];
+ _ ->
+ ignore
+ end.
+
+connect_nodes([]) ->
+ [];
+connect_nodes(Ns) ->
+    %% Determine which nodes we should try to connect to
+ AlreadyConnected = val(recover_nodes),
+ {_, Nodes} = mnesia_lib:search_delete(node(), Ns),
+ Check = Nodes -- AlreadyConnected,
+ GoodNodes = mnesia_monitor:negotiate_protocol(Check),
+ if
+ GoodNodes == [] ->
+	    %% No good nodes to connect to
+ ignore;
+ true ->
+ %% Now we have agreed upon a protocol with some new nodes
+ %% and we may use them when we recover transactions
+ mnesia_lib:add_list(recover_nodes, GoodNodes),
+ cast({announce_all, GoodNodes}),
+ case get_master_nodes(schema) of
+ [] ->
+ Context = starting_partitioned_network,
+ mnesia_monitor:detect_inconcistency(GoodNodes, Context);
+ _ -> %% If master_nodes is set ignore old inconsistencies
+ ignore
+ end
+ end,
+ {GoodNodes, AlreadyConnected}.
+
+disconnect(Node) ->
+ mnesia_monitor:disconnect(Node),
+ mnesia_lib:del(recover_nodes, Node).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+call(Msg) ->
+ Pid = whereis(?MODULE),
+ case Pid of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ link(Pid),
+ Res = gen_server:call(Pid, Msg, infinity),
+ unlink(Pid),
+
+	    %% We get an exit signal if the server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+multicall(Nodes, Msg) ->
+ rpc:multicall(Nodes, ?MODULE, call, [Msg]).
+
+cast(Msg) ->
+ case whereis(?MODULE) of
+ undefined -> ignore;
+ Pid -> gen_server:cast(Pid, Msg)
+ end.
+
+abcast(Nodes, Msg) ->
+ gen_server:abcast(Nodes, ?MODULE, Msg).
+
+note_decision(Tid, Outcome) ->
+ Tab = val(latest_transient_decision),
+ ?ets_insert(Tab, #transient_decision{tid = Tid, outcome = Outcome}).
+
+note_up(Node, _Date, _Time) ->
+ ?ets_delete(mnesia_decision, Node).
+
+note_down(Node, Date, Time) ->
+ ?ets_insert(mnesia_decision, {mnesia_down, Node, Date, Time}).
+
+note_master_nodes(Tab, []) ->
+ ?ets_delete(mnesia_decision, Tab);
+note_master_nodes(Tab, Nodes) when list(Nodes) ->
+ Master = {master_nodes, Tab, Nodes},
+ ?ets_insert(mnesia_decision, Master).
+
+note_outcome(D) when D#decision.disc_nodes == [] ->
+%% ?DBG("~w: note_tmp_decision: ~w~n", [node(), D]),
+ note_decision(D#decision.tid, filter_outcome(D#decision.outcome)),
+ ?ets_delete(mnesia_decision, D#decision.tid);
+note_outcome(D) when D#decision.disc_nodes /= [] ->
+%% ?DBG("~w: note_decision: ~w~n", [node(), D]),
+ ?ets_insert(mnesia_decision, D).
+
+log_decision(D) when D#decision.outcome /= unclear ->
+ OldD = decision(D#decision.tid),
+ MergedD = merge_decisions(node(), OldD, D),
+ do_log_decision(MergedD, true);
+log_decision(D) ->
+ do_log_decision(D, false).
+
+do_log_decision(D, DoTell) ->
+ RamNs = D#decision.ram_nodes,
+ DiscNs = D#decision.disc_nodes -- [node()],
+ Outcome = D#decision.outcome,
+ D2 =
+ case Outcome of
+ aborted -> D#decision{disc_nodes = DiscNs};
+ committed -> D#decision{disc_nodes = DiscNs};
+ _ -> D
+ end,
+ note_outcome(D2),
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:append(latest_log, D2),
+ if
+ DoTell == true, Outcome /= unclear ->
+ tell_im_certain(DiscNs, D2),
+ tell_im_certain(RamNs, D2);
+ true ->
+ ignore
+ end;
+ false ->
+ ignore
+ end.
+
+tell_im_certain([], _D) ->
+ ignore;
+tell_im_certain(Nodes, D) ->
+ Msg = {im_certain, node(), D},
+%% ?DBG("~w: ~w: tell: ~w~n", [node(), Msg, Nodes]),
+ abcast(Nodes, Msg).
+
+log_mnesia_up(Node) ->
+ call({log_mnesia_up, Node}).
+
+log_mnesia_down(Node) ->
+ call({log_mnesia_down, Node}).
+
+get_mnesia_downs() ->
+ Tab = mnesia_decision,
+ Pat = {mnesia_down, '_', '_', '_'},
+ Downs = ?ets_match_object(Tab, Pat),
+ [Node || {mnesia_down, Node, _Date, _Time} <- Downs].
+
+%% Check if we have got a mnesia_down from Node
+has_mnesia_down(Node) ->
+ case ?ets_lookup(mnesia_decision, Node) of
+ [{mnesia_down, Node, _Date, _Time}] ->
+ true;
+ [] ->
+ false
+ end.
+
+mnesia_down(Node) ->
+ case ?catch_val(recover_nodes) of
+ {'EXIT', _} ->
+ %% Not started yet
+ ignore;
+ _ ->
+ mnesia_lib:del(recover_nodes, Node),
+ cast({mnesia_down, Node})
+ end.
+
+log_master_nodes(Args, UseDir, IsRunning) ->
+ if
+ IsRunning == yes ->
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ UseDir == false ->
+ ok;
+ true ->
+ Name = latest_log,
+ Fname = mnesia_log:latest_log_file(),
+ Exists = mnesia_lib:exists(Fname),
+ Repair = mnesia:system_info(auto_repair),
+ OpenArgs = [{file, Fname}, {name, Name}, {repair, Repair}],
+ case disk_log:open(OpenArgs) of
+ {ok, Name} ->
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ {repaired, Name, {recovered, _R}, {badbytes, _B}}
+ when Exists == true ->
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ {repaired, Name, {recovered, _R}, {badbytes, _B}}
+ when Exists == false ->
+ mnesia_log:write_trans_log_header(),
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end.
+
+log_master_nodes2([{Tab, Nodes} | Tail], UseDir, IsRunning, WorstRes) ->
+ Res =
+ case IsRunning of
+ yes ->
+ R = call({log_master_nodes, Tab, Nodes, UseDir, IsRunning}),
+ mnesia_controller:master_nodes_updated(Tab, Nodes),
+ R;
+ _ ->
+ do_log_master_nodes(Tab, Nodes, UseDir, IsRunning)
+ end,
+ case Res of
+ ok ->
+ log_master_nodes2(Tail, UseDir, IsRunning, WorstRes);
+ {error, Reason} ->
+ log_master_nodes2(Tail, UseDir, IsRunning, {error, Reason})
+ end;
+log_master_nodes2([], _UseDir, IsRunning, WorstRes) ->
+ case IsRunning of
+ yes ->
+ WorstRes;
+ _ ->
+ disk_log:close(latest_log),
+ WorstRes
+ end.
+
+get_master_node_info() ->
+ Tab = mnesia_decision,
+ Pat = {master_nodes, '_', '_'},
+ case catch mnesia_lib:db_match_object(ram_copies,Tab, Pat) of
+ {'EXIT', _} ->
+ [];
+ Masters ->
+ Masters
+ end.
+
+get_master_node_tables() ->
+ Masters = get_master_node_info(),
+ [Tab || {master_nodes, Tab, _Nodes} <- Masters].
+
+get_master_nodes(Tab) ->
+ case catch ?ets_lookup_element(mnesia_decision, Tab, 3) of
+ {'EXIT', _} -> [];
+ Nodes -> Nodes
+ end.
+
+%% Determine what has happened to the transaction
+what_happened(Tid, Protocol, Nodes) ->
+ Default =
+ case Protocol of
+ asym_trans -> aborted;
+ _ -> unclear %% sym_trans and sync_sym_trans
+ end,
+ This = node(),
+ case lists:member(This, Nodes) of
+ true ->
+ {ok, Outcome} = call({what_happened, Default, Tid}),
+ Others = Nodes -- [This],
+ case filter_outcome(Outcome) of
+ unclear -> what_happened_remotely(Tid, Default, Others);
+ aborted -> aborted;
+ committed -> committed
+ end;
+ false ->
+ what_happened_remotely(Tid, Default, Nodes)
+ end.
+
+what_happened_remotely(Tid, Default, Nodes) ->
+ {Replies, _} = multicall(Nodes, {what_happened, Default, Tid}),
+ check_what_happened(Replies, 0, 0).
+
+check_what_happened([H | T], Aborts, Commits) ->
+ case H of
+ {ok, R} ->
+ case filter_outcome(R) of
+ committed ->
+ check_what_happened(T, Aborts, Commits + 1);
+ aborted ->
+ check_what_happened(T, Aborts + 1, Commits);
+ unclear ->
+ check_what_happened(T, Aborts, Commits)
+ end;
+ {error, _} ->
+ check_what_happened(T, Aborts, Commits);
+ {badrpc, _} ->
+ check_what_happened(T, Aborts, Commits)
+ end;
+check_what_happened([], Aborts, Commits) ->
+ if
+ Aborts == 0, Commits == 0 -> aborted; % None of the active nodes knows
+	Aborts > 0 -> aborted;                  % Somebody has aborted
+	Aborts == 0, Commits > 0 -> committed   % All have committed
+ end.
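Two illustrative traces of the vote counting above (not part of the committed file): a single abort wins over everything, and one committed reply is enough when nothing has aborted.

    %% check_what_happened([{ok, committed}, {badrpc, nodedown}], 0, 0)
    %%   -> check_what_happened([{badrpc, nodedown}], 0, 1)
    %%   -> check_what_happened([], 0, 1)            -> committed
    %%
    %% check_what_happened([{ok, aborted}, {ok, committed}], 0, 0)
    %%   -> check_what_happened([{ok, committed}], 1, 0)
    %%   -> check_what_happened([], 1, 1)            -> aborted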
+
+%% Determine what has happened to the transaction
+%% and possibly wait forever for the decision.
+wait_for_decision(presume_commit, _InitBy) ->
+ %% sym_trans
+ {{presume_commit, self()}, committed};
+
+wait_for_decision(D, InitBy) when D#decision.outcome == presume_abort ->
+ %% asym_trans
+ Tid = D#decision.tid,
+ Outcome = filter_outcome(outcome(Tid, D#decision.outcome)),
+ if
+ Outcome /= unclear ->
+ {Tid, Outcome};
+
+ InitBy /= startup ->
+ %% Wait a while for active transactions
+ %% to end and try again
+ timer:sleep(200),
+ wait_for_decision(D, InitBy);
+
+ InitBy == startup ->
+ {ok, Res} = call({wait_for_decision, D}),
+ {Tid, Res}
+ end.
+
+still_pending([Tid | Pending]) ->
+ case filter_outcome(outcome(Tid, unclear)) of
+ unclear -> [Tid | still_pending(Pending)];
+ _ -> still_pending(Pending)
+ end;
+still_pending([]) ->
+ [].
+
+load_decision_tab() ->
+ Cont = mnesia_log:open_decision_tab(),
+ load_decision_tab(Cont, load_decision_tab),
+ mnesia_log:close_decision_tab().
+
+load_decision_tab(eof, _InitBy) ->
+ ok;
+load_decision_tab(Cont, InitBy) ->
+ case mnesia_log:chunk_decision_tab(Cont) of
+ {Cont2, Decisions} ->
+ note_log_decisions(Decisions, InitBy),
+ load_decision_tab(Cont2, InitBy);
+ eof ->
+ ok
+ end.
+
+%% Dumps DECISION.LOG and PDECISION.LOG and removes them.
+%% From now on all decisions are logged in the transaction log file
+convert_old() ->
+ HasOldStuff =
+ mnesia_lib:exists(mnesia_log:previous_decision_log_file()) or
+ mnesia_lib:exists(mnesia_log:decision_log_file()),
+ case HasOldStuff of
+ true ->
+ mnesia_log:open_decision_log(),
+ dump_decision_log(startup),
+ dump_decision_log(startup),
+ mnesia_log:close_decision_log(),
+ Latest = mnesia_log:decision_log_file(),
+ ok = file:delete(Latest);
+ false ->
+ ignore
+ end.
+
+dump_decision_log(InitBy) ->
+ %% Assumed to be run in transaction log dumper process
+ Cont = mnesia_log:prepare_decision_log_dump(),
+ perform_dump_decision_log(Cont, InitBy).
+
+perform_dump_decision_log(eof, _InitBy) ->
+ confirm_decision_log_dump();
+perform_dump_decision_log(Cont, InitBy) when InitBy == startup ->
+ case mnesia_log:chunk_decision_log(Cont) of
+ {Cont2, Decisions} ->
+ note_log_decisions(Decisions, InitBy),
+ perform_dump_decision_log(Cont2, InitBy);
+ eof ->
+ confirm_decision_log_dump()
+ end;
+perform_dump_decision_log(_Cont, _InitBy) ->
+ confirm_decision_log_dump().
+
+confirm_decision_log_dump() ->
+ dump_decision_tab(),
+ mnesia_log:confirm_decision_log_dump().
+
+dump_decision_tab() ->
+ Tab = mnesia_decision,
+ All = mnesia_lib:db_match_object(ram_copies,Tab, '_'),
+ mnesia_log:save_decision_tab({decision_list, All}).
+
+note_log_decisions([What | Tail], InitBy) ->
+ note_log_decision(What, InitBy),
+ note_log_decisions(Tail, InitBy);
+note_log_decisions([], _InitBy) ->
+ ok.
+
+note_log_decision(NewD, InitBy) when NewD#decision.outcome == pre_commit ->
+ note_log_decision(NewD#decision{outcome = unclear}, InitBy);
+
+note_log_decision(NewD, _InitBy) when record(NewD, decision) ->
+ Tid = NewD#decision.tid,
+ sync_trans_tid_serial(Tid),
+ OldD = decision(Tid),
+ MergedD = merge_decisions(node(), OldD, NewD),
+ note_outcome(MergedD);
+
+note_log_decision({trans_tid, serial, _Serial}, startup) ->
+ ignore;
+
+note_log_decision({trans_tid, serial, Serial}, _InitBy) ->
+ sync_trans_tid_serial(Serial);
+
+note_log_decision({mnesia_up, Node, Date, Time}, _InitBy) ->
+ note_up(Node, Date, Time);
+
+note_log_decision({mnesia_down, Node, Date, Time}, _InitBy) ->
+ note_down(Node, Date, Time);
+
+note_log_decision({master_nodes, Tab, Nodes}, _InitBy) ->
+ note_master_nodes(Tab, Nodes);
+
+note_log_decision(H, _InitBy) when H#log_header.log_kind == decision_log ->
+ V = mnesia_log:decision_log_version(),
+ if
+ H#log_header.log_version == V->
+ ok;
+ H#log_header.log_version == "2.0" ->
+ verbose("Accepting an old version format of decision log: ~p~n",
+ [V]),
+ ok;
+ true ->
+ fatal("Bad version of decision log: ~p~n", [H])
+ end;
+
+note_log_decision(H, _InitBy) when H#log_header.log_kind == decision_tab ->
+ V = mnesia_log:decision_tab_version(),
+ if
+ V == H#log_header.log_version ->
+ ok;
+ true ->
+ fatal("Bad version of decision tab: ~p~n", [H])
+ end;
+note_log_decision({decision_list, ItemList}, InitBy) ->
+ note_log_decisions(ItemList, InitBy);
+note_log_decision(BadItem, InitBy) ->
+ exit({"Bad decision log item", BadItem, InitBy}).
+
+trans_tid_serial() ->
+ ?ets_lookup_element(mnesia_decision, serial, 3).
+
+set_trans_tid_serial(Val) ->
+ ?ets_insert(mnesia_decision, {trans_tid, serial, Val}).
+
+incr_trans_tid_serial() ->
+ ?ets_update_counter(mnesia_decision, serial, 1).
+
+sync_trans_tid_serial(ThatCounter) when integer(ThatCounter) ->
+ ThisCounter = trans_tid_serial(),
+ if
+ ThatCounter > ThisCounter ->
+ set_trans_tid_serial(ThatCounter + 1);
+ true ->
+ ignore
+ end;
+sync_trans_tid_serial(Tid) ->
+ sync_trans_tid_serial(Tid#tid.counter).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Callback functions from gen_server
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ mnesia_lib:verbose("~p starting: ~p~n", [?MODULE, self()]),
+ set(latest_transient_decision, create_transient_decision()),
+ set(previous_transient_decisions, []),
+ set(recover_nodes, []),
+ State = #state{supervisor = Parent},
+ {ok, State}.
+
+create_transient_decision() ->
+ ?ets_new_table(mnesia_transient_decision, [{keypos, 2}, set, public]).
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_call(init, From, State) when State#state.initiated == false ->
+ Args = [{keypos, 2}, set, public, named_table],
+ case mnesia_monitor:use_dir() of
+ true ->
+ ?ets_new_table(mnesia_decision, Args),
+ set_trans_tid_serial(0),
+ TabFile = mnesia_log:decision_tab_file(),
+ case mnesia_lib:exists(TabFile) of
+ true ->
+ load_decision_tab();
+ false ->
+ ignore
+ end,
+ convert_old(),
+ mnesia_dumper:opt_dump_log(scan_decisions);
+ false ->
+ ?ets_new_table(mnesia_decision, Args),
+ set_trans_tid_serial(0)
+ end,
+ handle_early_msgs(State, From);
+
+handle_call(Msg, From, State) when State#state.initiated == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ {noreply, State#state{early_msgs = [{call, Msg, From} | Msgs]}};
+
+handle_call({what_happened, Default, Tid}, _From, State) ->
+ sync_trans_tid_serial(Tid),
+ Outcome = outcome(Tid, Default),
+ {reply, {ok, Outcome}, State};
+
+handle_call({wait_for_decision, D}, From, State) ->
+ Recov = val(recover_nodes),
+ AliveRam = (mnesia_lib:intersect(D#decision.ram_nodes, Recov) -- [node()]),
+ RemoteDisc = D#decision.disc_nodes -- [node()],
+ if
+ AliveRam == [], RemoteDisc == [] ->
+	    %% No one else to wait for, so we may safely abort
+ {reply, {ok, aborted}, State};
+ true ->
+ verbose("Transaction ~p is unclear. "
+ "Wait for disc nodes: ~w ram: ~w~n",
+ [D#decision.tid, RemoteDisc, AliveRam]),
+ AliveDisc = mnesia_lib:intersect(RemoteDisc, Recov),
+ Msg = {what_decision, node(), D},
+ abcast(AliveRam, Msg),
+ abcast(AliveDisc, Msg),
+ case val(max_wait_for_decision) of
+ infinity ->
+ ignore;
+ MaxWait ->
+ ForceMsg = {force_decision, D#decision.tid},
+ {ok, _} = timer:send_after(MaxWait, ForceMsg)
+ end,
+ State2 = State#state{unclear_pid = From,
+ unclear_decision = D,
+ unclear_waitfor = (RemoteDisc ++ AliveRam)},
+ {noreply, State2}
+ end;
+
+handle_call({log_mnesia_up, Node}, _From, State) ->
+ do_log_mnesia_up(Node),
+ {reply, ok, State};
+
+handle_call({log_mnesia_down, Node}, _From, State) ->
+ do_log_mnesia_down(Node),
+ {reply, ok, State};
+
+handle_call({log_master_nodes, Tab, Nodes, UseDir, IsRunning}, _From, State) ->
+ do_log_master_nodes(Tab, Nodes, UseDir, IsRunning),
+ {reply, ok, State};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+do_log_mnesia_up(Node) ->
+ Yoyo = {mnesia_up, Node, Date = date(), Time = time()},
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:append(latest_log, Yoyo),
+ disk_log:sync(latest_log);
+ false ->
+ ignore
+ end,
+ note_up(Node, Date, Time).
+
+do_log_mnesia_down(Node) ->
+ Yoyo = {mnesia_down, Node, Date = date(), Time = time()},
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:append(latest_log, Yoyo),
+ disk_log:sync(latest_log);
+ false ->
+ ignore
+ end,
+ note_down(Node, Date, Time).
+
+do_log_master_nodes(Tab, Nodes, UseDir, IsRunning) ->
+ Master = {master_nodes, Tab, Nodes},
+ Res =
+ case UseDir of
+ true ->
+ LogRes = mnesia_log:append(latest_log, Master),
+ disk_log:sync(latest_log),
+ LogRes;
+ false ->
+ ok
+ end,
+ case IsRunning of
+ yes ->
+ note_master_nodes(Tab, Nodes);
+ _NotRunning ->
+ ignore
+ end,
+ Res.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_cast(Msg, State) when State#state.initiated == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ {noreply, State#state{early_msgs = [{cast, Msg} | Msgs]}};
+
+handle_cast({im_certain, Node, NewD}, State) ->
+ OldD = decision(NewD#decision.tid),
+ MergedD = merge_decisions(Node, OldD, NewD),
+ do_log_decision(MergedD, false),
+ {noreply, State};
+
+handle_cast(allow_garb, State) ->
+ do_allow_garb(),
+ {noreply, State};
+
+handle_cast({decisions, Node, Decisions}, State) ->
+ mnesia_lib:add(recover_nodes, Node),
+ State2 = add_remote_decisions(Node, Decisions, State),
+ {noreply, State2};
+
+handle_cast({what_decision, Node, OtherD}, State) ->
+ Tid = OtherD#decision.tid,
+ sync_trans_tid_serial(Tid),
+ Decision =
+ case decision(Tid) of
+ no_decision -> OtherD;
+ MyD when record(MyD, decision) -> MyD
+ end,
+ announce([Node], [Decision], [], true),
+ {noreply, State};
+
+handle_cast({mnesia_down, Node}, State) ->
+ case State#state.unclear_decision of
+ undefined ->
+ {noreply, State};
+ D ->
+ case lists:member(Node, D#decision.ram_nodes) of
+ false ->
+ {noreply, State};
+ true ->
+ State2 = add_remote_decision(Node, D, State),
+ {noreply, State2}
+ end
+ end;
+
+handle_cast({announce_all, Nodes}, State) ->
+ announce_all(Nodes, tabs()),
+ {noreply, State};
+
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+%% No need for buffering
+%% handle_info(Msg, State) when State#state.initiated == false ->
+%% %% Buffer early messages
+%% Msgs = State#state.early_msgs,
+%% {noreply, State#state{early_msgs = [{info, Msg} | Msgs]}};
+
+handle_info(check_overload, S) ->
+ %% Time to check if mnesia_tm is overloaded
+ case whereis(mnesia_tm) of
+ Pid when pid(Pid) ->
+
+ Threshold = 100,
+ Prev = S#state.tm_queue_len,
+ {message_queue_len, Len} =
+ process_info(Pid, message_queue_len),
+ if
+ Len > Threshold, Prev > Threshold ->
+ What = {mnesia_tm, message_queue_len, [Prev, Len]},
+ mnesia_lib:report_system_event({mnesia_overload, What}),
+ {noreply, S#state{tm_queue_len = 0}};
+
+ Len > Threshold ->
+ {noreply, S#state{tm_queue_len = Len}};
+
+ true ->
+ {noreply, S#state{tm_queue_len = 0}}
+ end;
+ undefined ->
+ {noreply, S}
+ end;
+
+handle_info(garb_decisions, State) ->
+ do_garb_decisions(),
+ {noreply, State};
+
+handle_info({force_decision, Tid}, State) ->
+ %% Enforce a transaction recovery decision,
+ %% if we still are waiting for the outcome
+
+ case State#state.unclear_decision of
+ U when U#decision.tid == Tid ->
+ verbose("Decided to abort transaction ~p since "
+ "max_wait_for_decision has been exceeded~n",
+ [Tid]),
+ D = U#decision{outcome = aborted},
+ State2 = add_remote_decision(node(), D, State),
+ {noreply, State2};
+ _ ->
+ {noreply, State}
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
+ mnesia_lib:dbg_out("~p was ~p~n",[?MODULE, R]),
+ {stop, shutdown, State};
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+
+terminate(Reason, State) ->
+ mnesia_monitor:terminate_proc(?MODULE, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+handle_early_msgs(State, From) ->
+ Res = do_handle_early_msgs(State#state.early_msgs,
+ State#state{early_msgs = [],
+ initiated = true}),
+ gen_server:reply(From, ok),
+ Res.
+
+do_handle_early_msgs([Msg | Msgs], State) ->
+    %% The messages are in reversed order
+ case do_handle_early_msgs(Msgs, State) of
+ {stop, Reason, Reply, State2} ->
+ {stop, Reason, Reply, State2};
+ {stop, Reason, State2} ->
+ {stop, Reason, State2};
+ {noreply, State2} ->
+ handle_early_msg(Msg, State2)
+ end;
+
+do_handle_early_msgs([], State) ->
+ {noreply, State}.
+
+handle_early_msg({call, Msg, From}, State) ->
+ case handle_call(Msg, From, State) of
+ {reply, R, S} ->
+ gen_server:reply(From, R),
+ {noreply, S};
+ Other ->
+ Other
+ end;
+handle_early_msg({cast, Msg}, State) ->
+ handle_cast(Msg, State);
+handle_early_msg({info, Msg}, State) ->
+ handle_info(Msg, State).
+
+tabs() ->
+ Curr = val(latest_transient_decision), % Do not miss any trans even
+ Prev = val(previous_transient_decisions), % if the tabs are switched
+ [Curr, mnesia_decision | Prev]. % Ordered by hit probability
+
+decision(Tid) ->
+ decision(Tid, tabs()).
+
+decision(Tid, [Tab | Tabs]) ->
+ case catch ?ets_lookup(Tab, Tid) of
+ [D] when record(D, decision) ->
+ D;
+ [C] when record(C, transient_decision) ->
+ #decision{tid = C#transient_decision.tid,
+ outcome = C#transient_decision.outcome,
+ disc_nodes = [],
+ ram_nodes = []
+ };
+ [] ->
+ decision(Tid, Tabs);
+ {'EXIT', _} ->
+ %% Recently switched transient decision table
+ decision(Tid, Tabs)
+ end;
+decision(_Tid, []) ->
+ no_decision.
+
+outcome(Tid, Default) ->
+ outcome(Tid, Default, tabs()).
+
+outcome(Tid, Default, [Tab | Tabs]) ->
+ case catch ?ets_lookup_element(Tab, Tid, 3) of
+ {'EXIT', _} ->
+ outcome(Tid, Default, Tabs);
+ Val ->
+ Val
+ end;
+outcome(_Tid, Default, []) ->
+ Default.
+
+filter_outcome(Val) ->
+ case Val of
+ unclear -> unclear;
+ aborted -> aborted;
+ presume_abort -> aborted;
+ committed -> committed;
+ pre_commit -> unclear
+ end.
+
+filter_aborted(D) when D#decision.outcome == presume_abort ->
+ D#decision{outcome = aborted};
+filter_aborted(D) ->
+ D.
+
+%% Merge old decision D with new (probably remote) decision
+merge_decisions(Node, D, NewD0) ->
+ NewD = filter_aborted(NewD0),
+ if
+ D == no_decision, node() /= Node ->
+ %% We did not know anything about this txn
+ NewD#decision{disc_nodes = []};
+ D == no_decision ->
+ NewD;
+ record(D, decision) ->
+ DiscNs = D#decision.disc_nodes -- ([node(), Node]),
+ OldD = filter_aborted(D#decision{disc_nodes = DiscNs}),
+%% mnesia_lib:dbg_out("merge ~w: NewD = ~w~n D = ~w~n OldD = ~w~n",
+%% [Node, NewD, D, OldD]),
+ if
+ OldD#decision.outcome == unclear,
+ NewD#decision.outcome == unclear ->
+ D;
+
+ OldD#decision.outcome == NewD#decision.outcome ->
+ %% We have come to the same decision
+ OldD;
+
+ OldD#decision.outcome == committed,
+ NewD#decision.outcome == aborted ->
+ %% Interesting! We have already committed,
+ %% but someone else has aborted. Now we
+		    %% have a nice little inconsistency. The
+		    %% other guy (or someone else) has
+ %% enforced a recovery decision when
+ %% max_wait_for_decision was exceeded.
+ %% We will pretend that we have obeyed
+ %% the forced recovery decision, but we
+ %% will also generate an event in case the
+ %% application wants to do something clever.
+ Msg = {inconsistent_database, bad_decision, Node},
+ mnesia_lib:report_system_event(Msg),
+ OldD#decision{outcome = aborted};
+
+ OldD#decision.outcome == aborted ->
+		    %% aborted overrides anything
+ OldD#decision{outcome = aborted};
+
+ NewD#decision.outcome == aborted ->
+		    %% aborted overrides anything
+ OldD#decision{outcome = aborted};
+
+ OldD#decision.outcome == committed,
+ NewD#decision.outcome == unclear ->
+ %% committed overrides unclear
+ OldD#decision{outcome = committed};
+
+ OldD#decision.outcome == unclear,
+ NewD#decision.outcome == committed ->
+ %% committed overrides unclear
+ OldD#decision{outcome = committed}
+ end
+ end.
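One concrete case of the merge rules above (illustrative only; T, n2 and n3 are placeholder tid and node values, not part of the committed file): a locally unclear decision merged with a remote abort keeps the pruned disc_nodes and adopts the aborted outcome.

    %% Old = #decision{tid = T, outcome = unclear,
    %%                 disc_nodes = [node(), n2, n3], ram_nodes = []},
    %% New = #decision{tid = T, outcome = aborted,
    %%                 disc_nodes = [], ram_nodes = []},
    %% merge_decisions(n2, Old, New)
    %%   -> #decision{tid = T, outcome = aborted,
    %%                disc_nodes = [n3], ram_nodes = []}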
+
+add_remote_decisions(Node, [D | Tail], State) when record(D, decision) ->
+ State2 = add_remote_decision(Node, D, State),
+ add_remote_decisions(Node, Tail, State2);
+
+add_remote_decisions(Node, [C | Tail], State)
+ when record(C, transient_decision) ->
+ D = #decision{tid = C#transient_decision.tid,
+ outcome = C#transient_decision.outcome,
+ disc_nodes = [],
+ ram_nodes = []},
+ State2 = add_remote_decision(Node, D, State),
+ add_remote_decisions(Node, Tail, State2);
+
+add_remote_decisions(Node, [{mnesia_down, _, _, _} | Tail], State) ->
+ add_remote_decisions(Node, Tail, State);
+
+add_remote_decisions(Node, [{trans_tid, serial, Serial} | Tail], State) ->
+ sync_trans_tid_serial(Serial),
+ case State#state.unclear_decision of
+ undefined ->
+ ignored;
+ D ->
+ case lists:member(Node, D#decision.ram_nodes) of
+ true ->
+ ignore;
+ false ->
+ abcast([Node], {what_decision, node(), D})
+ end
+ end,
+ add_remote_decisions(Node, Tail, State);
+
+add_remote_decisions(_Node, [], State) ->
+ State.
+
+add_remote_decision(Node, NewD, State) ->
+ Tid = NewD#decision.tid,
+ OldD = decision(Tid),
+ D = merge_decisions(Node, OldD, NewD),
+ do_log_decision(D, false),
+ Outcome = D#decision.outcome,
+ if
+ OldD == no_decision ->
+ ignore;
+ Outcome == unclear ->
+ ignore;
+ true ->
+ case lists:member(node(), NewD#decision.disc_nodes) or
+ lists:member(node(), NewD#decision.ram_nodes) of
+ true ->
+ tell_im_certain([Node], D);
+ false ->
+ ignore
+ end
+ end,
+ case State#state.unclear_decision of
+ U when U#decision.tid == Tid ->
+ WaitFor = State#state.unclear_waitfor -- [Node],
+ if
+ Outcome == unclear, WaitFor == [] ->
+		    %% Everybody is uncertain, let's abort
+ NewOutcome = aborted,
+ CertainD = D#decision{outcome = NewOutcome,
+ disc_nodes = [],
+ ram_nodes = []},
+ tell_im_certain(D#decision.disc_nodes, CertainD),
+ tell_im_certain(D#decision.ram_nodes, CertainD),
+ do_log_decision(CertainD, false),
+ verbose("Decided to abort transaction ~p "
+ "since everybody are uncertain ~p~n",
+ [Tid, CertainD]),
+ gen_server:reply(State#state.unclear_pid, {ok, NewOutcome}),
+ State#state{unclear_pid = undefined,
+ unclear_decision = undefined,
+ unclear_waitfor = undefined};
+ Outcome /= unclear ->
+ verbose("~p told us that transaction ~p was ~p~n",
+ [Node, Tid, Outcome]),
+ gen_server:reply(State#state.unclear_pid, {ok, Outcome}),
+ State#state{unclear_pid = undefined,
+ unclear_decision = undefined,
+ unclear_waitfor = undefined};
+ Outcome == unclear ->
+ State#state{unclear_waitfor = WaitFor}
+ end;
+ _ ->
+ State
+ end.
+
+announce_all([], _Tabs) ->
+ ok;
+announce_all(ToNodes, [Tab | Tabs]) ->
+ case catch mnesia_lib:db_match_object(ram_copies, Tab, '_') of
+ {'EXIT', _} ->
+ %% Oops, we are in the middle of a 'garb_decisions'
+ announce_all(ToNodes, Tabs);
+ List ->
+ announce(ToNodes, List, [], false),
+ announce_all(ToNodes, Tabs)
+ end;
+announce_all(_ToNodes, []) ->
+ ok.
+
+announce(ToNodes, [Head | Tail], Acc, ForceSend) ->
+ Acc2 = arrange(ToNodes, Head, Acc, ForceSend),
+ announce(ToNodes, Tail, Acc2, ForceSend);
+
+announce(_ToNodes, [], Acc, _ForceSend) ->
+ send_decisions(Acc).
+
+send_decisions([{Node, Decisions} | Tail]) ->
+ abcast([Node], {decisions, node(), Decisions}),
+ send_decisions(Tail);
+send_decisions([]) ->
+ ok.
+
+arrange([To | ToNodes], D, Acc, ForceSend) when record(D, decision) ->
+ NeedsAdd = (ForceSend or
+ lists:member(To, D#decision.disc_nodes) or
+ lists:member(To, D#decision.ram_nodes)),
+ case NeedsAdd of
+ true ->
+ Acc2 = add_decision(To, D, Acc),
+ arrange(ToNodes, D, Acc2, ForceSend);
+ false ->
+ arrange(ToNodes, D, Acc, ForceSend)
+ end;
+
+arrange([To | ToNodes], C, Acc, ForceSend) when record(C, transient_decision) ->
+ Acc2 = add_decision(To, C, Acc),
+ arrange(ToNodes, C, Acc2, ForceSend);
+
+arrange([_To | _ToNodes], {mnesia_down, _Node, _Date, _Time}, Acc, _ForceSend) ->
+ %% The others have their own info about this
+ Acc;
+
+arrange([_To | _ToNodes], {master_nodes, _Tab, _Nodes}, Acc, _ForceSend) ->
+ %% The others have their own info about this
+ Acc;
+
+arrange([To | ToNodes], {trans_tid, serial, Serial}, Acc, ForceSend) ->
+    %% Do the Lamport thing plus release the others
+    %% from uncertainty.
+ Acc2 = add_decision(To, {trans_tid, serial, Serial}, Acc),
+ arrange(ToNodes, {trans_tid, serial, Serial}, Acc2, ForceSend);
+
+arrange([], _Decision, Acc, _ForceSend) ->
+ Acc.
+
+add_decision(Node, Decision, [{Node, Decisions} | Tail]) ->
+ [{Node, [Decision | Decisions]} | Tail];
+add_decision(Node, Decision, [Head | Tail]) ->
+ [Head | add_decision(Node, Decision, Tail)];
+add_decision(Node, Decision, []) ->
+ [{Node, [Decision]}].
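A small worked example of the accumulator above (illustrative; n1, n2 and d1..d3 are placeholders, not part of the committed file): decisions are grouped per destination node, most recent first.

    %% add_decision(n1, d1, [])                -> [{n1, [d1]}]
    %% add_decision(n1, d2, [{n1, [d1]}])      -> [{n1, [d2, d1]}]
    %% add_decision(n2, d3, [{n1, [d2, d1]}])  -> [{n1, [d2, d1]}, {n2, [d3]}]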
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_registry.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_registry.erl
new file mode 100644
index 0000000000..c16603f344
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_registry.erl
@@ -0,0 +1,277 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_registry.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+-module(mnesia_registry).
+
+%%%----------------------------------------------------------------------
+%%% File : mnesia_registry.erl
+%%% Purpose : Support dump and restore of a registry on a C-node
+%%% This is an OTP internal module and is not publicly available.
+%%%
+%%% Example : Dump some hardcoded records into the Mnesia table Tab
+%%%
+%%% case rpc:call(Node, mnesia_registry, start_dump, [Tab, self()]) of
+%%% Pid when pid(Pid) ->
+%%% Pid ! {write, key1, key_size1, val_type1, val_size1, val1},
+%%% Pid ! {delete, key3},
+%%% Pid ! {write, key2, key_size2, val_type2, val_size2, val2},
+%%% Pid ! {write, key4, key_size4, val_type4, val_size4, val4},
+%%% Pid ! {commit, self()},
+%%% receive
+%%% {ok, Pid} ->
+%%% ok;
+%%% {'EXIT', Pid, Reason} ->
+%%% exit(Reason)
+%%% end;
+%%% {badrpc, Reason} ->
+%%% exit(Reason)
+%%% end.
+%%%
+%%% Example : Restore the corresponding Mnesia table Tab
+%%%
+%%% case rpc:call(Node, mnesia_registry, start_restore, [Tab, self()]) of
+%%% {size, Pid, N, LargestKey, LargestVal} ->
+%%% Pid ! {send_records, self()},
+%%% Fun = fun() ->
+%%% receive
+%%% {restore, KeySize, ValSize, ValType, Key, Val} ->
+%%% {Key, Val};
+%%% {'EXIT', Pid, Reason} ->
+%%% exit(Reason)
+%%% end
+%%% end,
+%%% lists:map(Fun, lists:seq(1, N));
+%%% {badrpc, Reason} ->
+%%% exit(Reason)
+%%% end.
+%%%
+%%%----------------------------------------------------------------------
+
+%% External exports
+-export([start_dump/2, start_restore/2]).
+-export([create_table/1, create_table/2]).
+
+%% Internal exports
+-export([init/4]).
+
+-record(state, {table, ops = [], link_to}).
+
+-record(registry_entry, {key, key_size, val_type, val_size, val}).
+
+-record(size, {pid = self(), n_values = 0, largest_key = 0, largest_val = 0}).
+
+%%%----------------------------------------------------------------------
+%%% Client
+%%%----------------------------------------------------------------------
+
+start(Type, Tab, LinkTo) ->
+ Starter = self(),
+ Args = [Type, Starter, LinkTo, Tab],
+ Pid = spawn_link(?MODULE, init, Args),
+ %% The receiver process may unlink the current process
+ receive
+ {ok, Res} ->
+ Res;
+ {'EXIT', Pid, Reason} when LinkTo == Starter ->
+ exit(Reason)
+ end.
+
+%% Starts a receiver process and optionally creates a Mnesia table
+%% with suitable default values. Returns the Pid of the receiver process
+%%
+%% The receiver process accumulates Mnesia operations and performs
+%% all operations or none at commit. The understood messages are:
+%%
+%% {write, Key, KeySize, ValType, ValSize, Val} ->
+%% accumulates mnesia:write({Tab, Key, KeySize, ValType, ValSize, Val})
+%% (no reply)
+%% {delete, Key} ->
+%% accumulates mnesia:delete({Tab, Key}) (no reply)
+%% {commit, ReplyTo} ->
+%% commits all accumulated operations
+%% and stops the process (replies {ok, Pid})
+%% abort ->
+%% stops the process (no reply)
+%%
+%% The receiver process is linked to the process with the process identifier
+%% LinkTo. If some error occurs the receiver process will invoke exit(Reason)
+%% and it is up to the LinkTo process to act properly when it receives an exit
+%% signal.
+
+start_dump(Tab, LinkTo) ->
+ start(dump, Tab, LinkTo).
+
+%% Starts a sender process which sends restore messages back to the
+%% LinkTo process. But first, some statistics about the table are
+%% determined and returned as a 5-tuple:
+%%
+%% {size, SenderPid, N, LargestKeySize, LargestValSize}
+%%
+%% where N is the number of records in the table. Then the sender process
+%% waits for a 2-tuple message:
+%%
+%% {send_records, ReplyTo}
+%%
+%% Finally, N 6-tuple messages are sent to the ReplyTo process:
+%%
+%% ReplyTo ! {restore, KeySize, ValSize, ValType, Key, Val}
+%%
+%% If some error occurs the sender process will invoke exit(Reason)
+%% and it is up to the LinkTo process to act properly when it receives an
+%% exit signal.
+
+start_restore(Tab, LinkTo) ->
+ start(restore, Tab, LinkTo).
+
+
+%% Optionally creates the Mnesia table Tab with suitable default values.
+%% Returns ok or exits
+create_table(Tab) ->
+ Storage = mnesia:table_info(schema, storage_type),
+ create_table(Tab, [{Storage, [node()]}]).
+
+create_table(Tab, TabDef) ->
+ Attrs = record_info(fields, registry_entry),
+ case mnesia:create_table(Tab, [{attributes, Attrs} | TabDef]) of
+ {'atomic', ok} ->
+ ok;
+ {aborted, {already_exists, Tab}} ->
+ ok;
+ {aborted, Reason} ->
+ exit(Reason)
+ end.
+
+%%%----------------------------------------------------------------------
+%%% Server
+%%%----------------------------------------------------------------------
+
+init(Type, Starter, LinkTo, Tab) ->
+ if
+ LinkTo /= Starter ->
+ link(LinkTo),
+ unlink(Starter);
+ true ->
+ ignore
+ end,
+ case Type of
+ dump ->
+ Starter ! {ok, self()},
+ dump_loop(#state{table = Tab, link_to = LinkTo});
+ restore ->
+ restore_table(Tab, Starter, LinkTo)
+ end.
+
+%%%----------------------------------------------------------------------
+%%% Dump loop
+%%%----------------------------------------------------------------------
+
+dump_loop(S) ->
+ Tab = S#state.table,
+ Ops = S#state.ops,
+ receive
+ {write, Key, KeySize, ValType, ValSize, Val} ->
+ RE = #registry_entry{key = Key,
+ key_size = KeySize,
+ val_type = ValType,
+ val_size = ValSize,
+ val = Val},
+ dump_loop(S#state{ops = [{write, RE} | Ops]});
+ {delete, Key} ->
+ dump_loop(S#state{ops = [{delete, Key} | Ops]});
+ {commit, ReplyTo} ->
+ create_table(Tab),
+ RecName = mnesia:table_info(Tab, record_name),
+ %% The Ops are in reverse order, but there is no need
+ %% for reversing the list of accumulated operations
+ case mnesia:transaction(fun handle_ops/3, [Tab, RecName, Ops]) of
+ {'atomic', ok} ->
+ ReplyTo ! {ok, self()},
+ stop(S#state.link_to);
+ {aborted, Reason} ->
+ exit({aborted, Reason})
+ end;
+ abort ->
+ stop(S#state.link_to);
+ BadMsg ->
+ exit({bad_message, BadMsg})
+ end.
+
+stop(LinkTo) ->
+ unlink(LinkTo),
+ exit(normal).
+
+%% Grab a write lock for the entire table
+%% and iterate over all accumulated operations
+handle_ops(Tab, RecName, Ops) ->
+ mnesia:write_lock_table(Tab),
+ do_handle_ops(Tab, RecName, Ops).
+
+do_handle_ops(Tab, RecName, [{write, RegEntry} | Ops]) ->
+ Record = setelement(1, RegEntry, RecName),
+ mnesia:write(Tab, Record, write),
+ do_handle_ops(Tab, RecName, Ops);
+do_handle_ops(Tab, RecName, [{delete, Key} | Ops]) ->
+ mnesia:delete(Tab, Key, write),
+ do_handle_ops(Tab, RecName, Ops);
+do_handle_ops(_Tab, _RecName, []) ->
+ ok.
+
+%%%----------------------------------------------------------------------
+%%% Restore table
+%%%----------------------------------------------------------------------
+
+restore_table(Tab, Starter, LinkTo) ->
+ Pat = mnesia:table_info(Tab, wild_pattern),
+ Fun = fun() -> mnesia:match_object(Tab, Pat, read) end,
+ case mnesia:transaction(Fun) of
+ {'atomic', AllRecords} ->
+ Size = calc_size(AllRecords, #size{}),
+ Starter ! {ok, Size},
+ receive
+ {send_records, ReplyTo} ->
+ send_records(AllRecords, ReplyTo),
+ unlink(LinkTo),
+ exit(normal);
+ BadMsg ->
+ exit({bad_message, BadMsg})
+ end;
+ {aborted, Reason} ->
+ exit(Reason)
+ end.
+
+calc_size([H | T], S) ->
+ KeySize = max(element(#registry_entry.key_size, H), S#size.largest_key),
+ ValSize = max(element(#registry_entry.val_size, H), S#size.largest_val),
+ N = S#size.n_values + 1,
+ calc_size(T, S#size{n_values = N, largest_key = KeySize, largest_val = ValSize});
+calc_size([], Size) ->
+ Size.
+
+max(New, Old) when New > Old -> New;
+max(_New, Old) -> Old.
+
+send_records([H | T], ReplyTo) ->
+ KeySize = element(#registry_entry.key_size, H),
+ ValSize = element(#registry_entry.val_size, H),
+ ValType = element(#registry_entry.val_type, H),
+ Key = element(#registry_entry.key, H),
+ Val = element(#registry_entry.val, H),
+ ReplyTo ! {restore, KeySize, ValSize, ValType, Key, Val},
+ send_records(T, ReplyTo);
+send_records([], _ReplyTo) ->
+ ok.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_schema.erl
new file mode 100644
index 0000000000..cceb6bf0d1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_schema.erl
@@ -0,0 +1,2899 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_schema.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+%% In this module we provide a number of explicit functions
+%% to manipulate the schema. All these functions are called
+%% within a special schema transaction.
+%%
+%% We also have an init/1 function defined here; this function is
+%% used by mnesia:start() to initialize the entire schema.
+
+-module(mnesia_schema).
+
+-export([
+ add_snmp/2,
+ add_table_copy/3,
+ add_table_index/2,
+ arrange_restore/3,
+ attr_tab_to_pos/2,
+ attr_to_pos/2,
+ change_table_copy_type/3,
+ change_table_access_mode/2,
+ change_table_load_order/2,
+ change_table_frag/2,
+ clear_table/1,
+ create_table/1,
+ cs2list/1,
+ del_snmp/1,
+ del_table_copy/2,
+ del_table_index/2,
+ delete_cstruct/2,
+ delete_schema/1,
+ delete_schema2/0,
+ delete_table/1,
+ delete_table_property/2,
+ dump_tables/1,
+ ensure_no_schema/1,
+ get_create_list/1,
+ get_initial_schema/2,
+ get_table_properties/1,
+ info/0,
+ info/1,
+ init/1,
+ insert_cstruct/3,
+ is_remote_member/1,
+ list2cs/1,
+ lock_schema/0,
+ lock_del_table/4, % Spawned
+ merge_schema/0,
+ move_table/3,
+ opt_create_dir/2,
+ prepare_commit/3,
+ purge_dir/2,
+ purge_tmp_files/0,
+ ram_delete_table/2,
+% ram_delete_table/3,
+ read_cstructs_from_disc/0,
+ read_nodes/0,
+ remote_read_schema/0,
+ restore/1,
+ restore/2,
+ restore/3,
+ schema_coordinator/3,
+ set_where_to_read/3,
+ transform_table/4,
+ undo_prepare_commit/2,
+ unlock_schema/0,
+ version/0,
+ write_table_property/2
+ ]).
+
+%% Exports for mnesia_frag
+-export([
+ get_tid_ts_and_lock/2,
+ make_create_table/1,
+ ensure_active/1,
+ pick/4,
+ verify/3,
+ incr_version/1,
+ check_keys/3,
+ check_duplicates/2,
+ make_delete_table/2
+ ]).
+
+%% Needed outside to be able to use/set table_properties
+%% from user (not supported)
+-export([schema_transaction/1,
+ insert_schema_ops/2,
+ do_create_table/1,
+ do_delete_table/1,
+ do_delete_table_property/2,
+ do_write_table_property/2]).
+
+-include("mnesia.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-import(mnesia_lib, [set/2, del/2, verbose/2, dbg_out/2]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Here comes the init function, which also resides in
+%% this module. It is called by the trans server
+%% at startup of the system.
+%%
+%% We have a meta table which looks like
+%% {table, schema,
+%%    {type, set},
+%%    {disc_copies, all},
+%%    {arity, 2},
+%%    {attributes, [key, val]}}
+%%
+%% This means that we have a series of {schema, Name, Cs} tuples
+%% in a table called schema!
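+%%
+%% For illustration only (a rough sketch, not part of the original code):
+%% an entry for a hypothetical table 'foo' created with default options
+%% would be stored roughly as
+%%
+%%    {schema, foo, [{name, foo}, {type, set}, {ram_copies, [node()]},
+%%                   {attributes, [key, val]}, ...]}
+%%
+%% i.e. the third element is the create list that list2cs/1 turns back
+%% into a #cstruct{} record.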
+
+init(IgnoreFallback) ->
+ Res = read_schema(true, false, IgnoreFallback),
+ {ok, Source, _CreateList} = exit_on_error(Res),
+ verbose("Schema initiated from: ~p~n", [Source]),
+ set({schema, tables}, []),
+ set({schema, local_tables}, []),
+ Tabs = set_schema(?ets_first(schema)),
+ lists:foreach(fun(Tab) -> clear_whereabouts(Tab) end, Tabs),
+ set({schema, where_to_read}, node()),
+ set({schema, load_node}, node()),
+ set({schema, load_reason}, initial),
+ mnesia_controller:add_active_replica(schema, node()).
+
+exit_on_error({error, Reason}) ->
+ exit(Reason);
+exit_on_error(GoodRes) ->
+ GoodRes.
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+%% This function traverses all cstructs in the schema and
+%% sets all values in mnesia_gvar accordingly for each table/cstruct
+
+set_schema('$end_of_table') ->
+ [];
+set_schema(Tab) ->
+ do_set_schema(Tab),
+ [Tab | set_schema(?ets_next(schema, Tab))].
+
+get_create_list(Tab) ->
+ ?ets_lookup_element(schema, Tab, 3).
+
+do_set_schema(Tab) ->
+ List = get_create_list(Tab),
+ Cs = list2cs(List),
+ do_set_schema(Tab, Cs).
+
+do_set_schema(Tab, Cs) ->
+ Type = Cs#cstruct.type,
+ set({Tab, setorbag}, Type),
+ set({Tab, local_content}, Cs#cstruct.local_content),
+ set({Tab, ram_copies}, Cs#cstruct.ram_copies),
+ set({Tab, disc_copies}, Cs#cstruct.disc_copies),
+ set({Tab, disc_only_copies}, Cs#cstruct.disc_only_copies),
+ set({Tab, load_order}, Cs#cstruct.load_order),
+ set({Tab, access_mode}, Cs#cstruct.access_mode),
+ set({Tab, snmp}, Cs#cstruct.snmp),
+ set({Tab, user_properties}, Cs#cstruct.user_properties),
+ [set({Tab, user_property, element(1, P)}, P) || P <- Cs#cstruct.user_properties],
+ set({Tab, frag_properties}, Cs#cstruct.frag_properties),
+ mnesia_frag:set_frag_hash(Tab, Cs#cstruct.frag_properties),
+ set({Tab, attributes}, Cs#cstruct.attributes),
+ Arity = length(Cs#cstruct.attributes) + 1,
+ set({Tab, arity}, Arity),
+ RecName = Cs#cstruct.record_name,
+ set({Tab, record_name}, RecName),
+ set({Tab, record_validation}, {RecName, Arity, Type}),
+ set({Tab, wild_pattern}, wild(RecName, Arity)),
+ set({Tab, index}, Cs#cstruct.index),
+ %% create actual index tabs later
+ set({Tab, cookie}, Cs#cstruct.cookie),
+ set({Tab, version}, Cs#cstruct.version),
+ set({Tab, cstruct}, Cs),
+ Storage = mnesia_lib:schema_cs_to_storage_type(node(), Cs),
+ set({Tab, storage_type}, Storage),
+ mnesia_lib:add({schema, tables}, Tab),
+ Ns = mnesia_lib:cs_to_nodes(Cs),
+ case lists:member(node(), Ns) of
+ true ->
+ mnesia_lib:add({schema, local_tables}, Tab);
+ false when Tab == schema ->
+ mnesia_lib:add({schema, local_tables}, Tab);
+ false ->
+ ignore
+ end.
+
+wild(RecName, Arity) ->
+ Wp0 = list_to_tuple(lists:duplicate(Arity, '_')),
+ setelement(1, Wp0, RecName).
+
+%% Temporarily read the local schema and return a list
+%% of all nodes mentioned in the schema.DAT file
+read_nodes() ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case read_schema(false, false) of
+ {ok, _Source, CreateList} ->
+ Cs = list2cs(CreateList),
+ {ok, Cs#cstruct.disc_copies ++ Cs#cstruct.ram_copies};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%% Returns Version from the tuple {Version,MasterNodes}
+version() ->
+ case read_schema(false, false) of
+ {ok, Source, CreateList} when Source /= default ->
+ Cs = list2cs(CreateList),
+ {Version, _Details} = Cs#cstruct.version,
+ Version;
+ _ ->
+ case dir_exists(mnesia_lib:dir()) of
+ true -> {1,0};
+ false -> {0,0}
+ end
+ end.
+
+%% Calculate next table version from old cstruct
+incr_version(Cs) ->
+ {{Major, Minor}, _} = Cs#cstruct.version,
+ Nodes = mnesia_lib:intersect(val({schema, disc_copies}),
+ mnesia_lib:cs_to_nodes(Cs)),
+ V =
+ case Nodes -- val({Cs#cstruct.name, active_replicas}) of
+ [] -> {Major + 1, 0}; % All replicas are active
+ _ -> {Major, Minor + 1} % Some replicas are inactive
+ end,
+ Cs#cstruct{version = {V, {node(), now()}}}.
+
+%% Returns table name
+insert_cstruct(Tid, Cs, KeepWhereabouts) ->
+ Tab = Cs#cstruct.name,
+ TabDef = cs2list(Cs),
+ Val = {schema, Tab, TabDef},
+ mnesia_checkpoint:tm_retain(Tid, schema, Tab, write),
+ mnesia_subscr:report_table_event(schema, Tid, Val, write),
+ Active = val({Tab, active_replicas}),
+
+ case KeepWhereabouts of
+ true ->
+ ignore;
+ false when Active == [] ->
+ clear_whereabouts(Tab);
+ false ->
+ %% Someone else has initiated table
+ ignore
+ end,
+ set({Tab, cstruct}, Cs),
+ ?ets_insert(schema, Val),
+ do_set_schema(Tab, Cs),
+ Val.
+
+clear_whereabouts(Tab) ->
+ set({Tab, checkpoints}, []),
+ set({Tab, subscribers}, []),
+ set({Tab, where_to_read}, nowhere),
+ set({Tab, active_replicas}, []),
+ set({Tab, commit_work}, []),
+ set({Tab, where_to_write}, []),
+ set({Tab, where_to_commit}, []),
+ set({Tab, load_by_force}, false),
+ set({Tab, load_node}, unknown),
+ set({Tab, load_reason}, unknown).
+
+%% Returns table name
+delete_cstruct(Tid, Cs) ->
+ Tab = Cs#cstruct.name,
+ TabDef = cs2list(Cs),
+ Val = {schema, Tab, TabDef},
+ mnesia_checkpoint:tm_retain(Tid, schema, Tab, delete),
+ mnesia_subscr:report_table_event(schema, Tid, Val, delete),
+ ?ets_match_delete(mnesia_gvar, {{Tab, '_'}, '_'}),
+ ?ets_match_delete(mnesia_gvar, {{Tab, '_', '_'}, '_'}),
+ del({schema, local_tables}, Tab),
+ del({schema, tables}, Tab),
+ ?ets_delete(schema, Tab),
+ Val.
+
+%% Delete the Mnesia directory on all given nodes
+%% Requires that Mnesia is not running anywhere
+%% Returns ok | {error,Reason}
+delete_schema(Ns) when list(Ns), Ns /= [] ->
+ RunningNs = mnesia_lib:running_nodes(Ns),
+ Reason = "Cannot delete schema on all nodes",
+ if
+ RunningNs == [] ->
+ case rpc:multicall(Ns, ?MODULE, delete_schema2, []) of
+ {Replies, []} ->
+ case [R || R <- Replies, R /= ok] of
+ [] ->
+ ok;
+ BadReplies ->
+ verbose("~s: ~p~n", [Reason, BadReplies]),
+ {error, {"All nodes not running", BadReplies}}
+ end;
+ {_Replies, BadNs} ->
+ verbose("~s: ~p~n", [Reason, BadNs]),
+ {error, {"All nodes not running", BadNs}}
+ end;
+ true ->
+ verbose("~s: ~p~n", [Reason, RunningNs]),
+ {error, {"Mnesia is not stopped everywhere", RunningNs}}
+ end;
+delete_schema(Ns) ->
+ {error, {badarg, Ns}}.
+
+delete_schema2() ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case mnesia_lib:is_running() of
+ no ->
+ Dir = mnesia_lib:dir(),
+ purge_dir(Dir, []),
+ ok;
+ _ ->
+ {error, {"Mnesia still running", node()}}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+ensure_no_schema([H|T]) when atom(H) ->
+ case rpc:call(H, ?MODULE, remote_read_schema, []) of
+ {badrpc, Reason} ->
+ {H, {"All nodes not running", H, Reason}};
+ {ok,Source, _} when Source /= default ->
+ {H, {already_exists, H}};
+ _ ->
+ ensure_no_schema(T)
+ end;
+ensure_no_schema([H|_]) ->
+ {error,{badarg, H}};
+ensure_no_schema([]) ->
+ ok.
+
+remote_read_schema() ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case mnesia_monitor:get_env(schema_location) of
+ opt_disc ->
+ read_schema(false, true);
+ _ ->
+ read_schema(false, false)
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+dir_exists(Dir) ->
+ dir_exists(Dir, mnesia_monitor:use_dir()).
+dir_exists(Dir, true) ->
+ case file:read_file_info(Dir) of
+ {ok, _} -> true;
+ _ -> false
+ end;
+dir_exists(_Dir, false) ->
+ false.
+
+opt_create_dir(UseDir, Dir) when UseDir == true->
+ case dir_exists(Dir, UseDir) of
+ true ->
+ check_can_write(Dir);
+ false ->
+ case file:make_dir(Dir) of
+ ok ->
+ verbose("Create Directory ~p~n", [Dir]),
+ ok;
+ {error, Reason} ->
+ verbose("Cannot create mnesia dir ~p~n", [Reason]),
+ {error, {"Cannot create Mnesia dir", Dir, Reason}}
+ end
+ end;
+opt_create_dir(false, _) ->
+ {error, {has_no_disc, node()}}.
+
+check_can_write(Dir) ->
+ case file:read_file_info(Dir) of
+ {ok, FI} when FI#file_info.type == directory,
+ FI#file_info.access == read_write ->
+ ok;
+ {ok, _} ->
+ {error, "Not allowed to write in Mnesia dir", Dir};
+ _ ->
+ {error, "Non existent Mnesia dir", Dir}
+ end.
+
+lock_schema() ->
+ mnesia_lib:lock_table(schema).
+
+unlock_schema() ->
+ mnesia_lib:unlock_table(schema).
+
+read_schema(Keep, _UseDirAnyway) ->
+ read_schema(Keep, false, false).
+
+%% The schema may be read for several reasons.
+%% If Mnesia is not already started we normally do not want
+%% the ets table named schema to be left around after the read.
+%% If Keep == true, the ets table schema is kept
+%% If Keep == false, the ets table schema is removed
+%%
+%% Returns {ok, Source, CreateList} or {error, Reason}
+%% Source may be: default | ram | disc | fallback
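+%%
+%% A rough usage sketch (for illustration only; assumes Mnesia is not
+%% running, no disc schema exists and Keep == false, so the default
+%% schema create list is returned):
+%%
+%%    {ok, default, CreateList} = read_schema(false, false, false),
+%%    Cs = list2cs(CreateList)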
+
+read_schema(Keep, UseDirAnyway, IgnoreFallback) ->
+ lock_schema(),
+ Res =
+ case mnesia:system_info(is_running) of
+ yes ->
+ {ok, ram, get_create_list(schema)};
+ _IsRunning ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ read_disc_schema(Keep, IgnoreFallback);
+ false when UseDirAnyway == true ->
+ read_disc_schema(Keep, IgnoreFallback);
+ false when Keep == true ->
+ Args = [{keypos, 2}, public, named_table, set],
+ mnesia_monitor:mktab(schema, Args),
+ CreateList = get_initial_schema(ram_copies, []),
+ ?ets_insert(schema,{schema, schema, CreateList}),
+ {ok, default, CreateList};
+ false when Keep == false ->
+ CreateList = get_initial_schema(ram_copies, []),
+ {ok, default, CreateList}
+ end
+ end,
+ unlock_schema(),
+ Res.
+
+read_disc_schema(Keep, IgnoreFallback) ->
+ Running = mnesia:system_info(is_running),
+ case mnesia_bup:fallback_exists() of
+ true when IgnoreFallback == false, Running /= yes ->
+ mnesia_bup:fallback_to_schema();
+ _ ->
+ %% If we're running, we read the schema file even
+ %% if fallback exists
+ Dat = mnesia_lib:tab2dat(schema),
+ case mnesia_lib:exists(Dat) of
+ true ->
+ do_read_disc_schema(Dat, Keep);
+ false ->
+ Dmp = mnesia_lib:tab2dmp(schema),
+ case mnesia_lib:exists(Dmp) of
+ true ->
+ %% May only happen when toggling of
+ %% schema storage type has been
+ %% interrupted
+ do_read_disc_schema(Dmp, Keep);
+ false ->
+ {error, "No schema file exists"}
+ end
+ end
+ end.
+
+do_read_disc_schema(Fname, Keep) ->
+ T =
+ case Keep of
+ false ->
+ Args = [{keypos, 2}, public, set],
+ ?ets_new_table(schema, Args);
+ true ->
+ Args = [{keypos, 2}, public, named_table, set],
+ mnesia_monitor:mktab(schema, Args)
+ end,
+ Repair = mnesia_monitor:get_env(auto_repair),
+    Res = % BUGBUG Fix up the dcl handling!
+ case mnesia_lib:dets_to_ets(schema, T, Fname, set, Repair, no) of
+ loaded -> {ok, disc, ?ets_lookup_element(T, schema, 3)};
+ Other -> {error, {"Cannot read schema", Fname, Other}}
+ end,
+ case Keep of
+ true -> ignore;
+ false -> ?ets_delete_table(T)
+ end,
+ Res.
+
+get_initial_schema(SchemaStorage, Nodes) ->
+ Cs = #cstruct{name = schema,
+ record_name = schema,
+ attributes = [table, cstruct]},
+ Cs2 =
+ case SchemaStorage of
+ ram_copies -> Cs#cstruct{ram_copies = Nodes};
+ disc_copies -> Cs#cstruct{disc_copies = Nodes}
+ end,
+ cs2list(Cs2).
+
+read_cstructs_from_disc() ->
+ %% Assumptions:
+ %% - local schema lock in global
+ %% - use_dir is true
+ %% - Mnesia is not running
+ %% - Ignore fallback
+
+ Fname = mnesia_lib:tab2dat(schema),
+ case mnesia_lib:exists(Fname) of
+ true ->
+ Args = [{file, Fname},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, set}],
+ case dets:open_file(make_ref(), Args) of
+ {ok, Tab} ->
+ Fun = fun({_, _, List}) ->
+ {continue, list2cs(List)}
+ end,
+ Cstructs = dets:traverse(Tab, Fun),
+ dets:close(Tab),
+ {ok, Cstructs};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ false ->
+ {error, "No schema file exists"}
+ end.
+
+%% We run a very special type of transaction when we
+%% want to manipulate the schema.
+
+get_tid_ts_and_lock(Tab, Intent) ->
+ TidTs = get(mnesia_activity_state),
+ case TidTs of
+ {_Mod, Tid, Ts} when record(Ts, tidstore)->
+ Store = Ts#tidstore.store,
+ case Intent of
+ read -> mnesia_locker:rlock_table(Tid, Store, Tab);
+ write -> mnesia_locker:wlock_table(Tid, Store, Tab);
+ none -> ignore
+ end,
+ TidTs;
+ _ ->
+ mnesia:abort(no_transaction)
+ end.
+
+schema_transaction(Fun) ->
+ case get(mnesia_activity_state) of
+ undefined ->
+ Args = [self(), Fun, whereis(mnesia_controller)],
+ Pid = spawn_link(?MODULE, schema_coordinator, Args),
+ receive
+ {transaction_done, Res, Pid} -> Res;
+ {'EXIT', Pid, R} -> {aborted, {transaction_crashed, R}}
+ end;
+ _ ->
+ {aborted, nested_transaction}
+ end.
+
+%% This process may dump the transaction log, and should
+%% therefore not be run in an application process
+%%
+schema_coordinator(Client, _Fun, undefined) ->
+ Res = {aborted, {node_not_running, node()}},
+ Client ! {transaction_done, Res, self()},
+ unlink(Client);
+
+schema_coordinator(Client, Fun, Controller) when pid(Controller) ->
+ %% Do not trap exit in order to automatically die
+ %% when the controller dies
+
+ link(Controller),
+ unlink(Client),
+
+    %% Fulfill the transaction even if the client dies
+ Res = mnesia:transaction(Fun),
+ Client ! {transaction_done, Res, self()},
+ unlink(Controller), % Avoids spurious exit message
+ unlink(whereis(mnesia_tm)), % Avoids spurious exit message
+ exit(normal).
+
+%% The make* routines return a list of ops; this function
+%% inserts them all in the Store and maintains the local order
+%% of ops.
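+%%
+%% For illustration only (a rough sketch; 'foo' is a hypothetical table):
+%% such an op list typically looks something like
+%%
+%%    [{op, create_table, [{name, foo}, {type, set}, ...]}]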
+
+insert_schema_ops({_Mod, _Tid, Ts}, SchemaIOps) ->
+ do_insert_schema_ops(Ts#tidstore.store, SchemaIOps).
+
+do_insert_schema_ops(Store, [Head | Tail]) ->
+ ?ets_insert(Store, Head),
+ do_insert_schema_ops(Store, Tail);
+do_insert_schema_ops(_Store, []) ->
+ ok.
+
+cs2list(Cs) when record(Cs, cstruct) ->
+ Tags = record_info(fields, cstruct),
+ rec2list(Tags, 2, Cs);
+cs2list(CreateList) when list(CreateList) ->
+ CreateList.
+
+rec2list([Tag | Tags], Pos, Rec) ->
+ Val = element(Pos, Rec),
+ [{Tag, Val} | rec2list(Tags, Pos + 1, Rec)];
+rec2list([], _Pos, _Rec) ->
+ [].
+
+list2cs(List) when list(List) ->
+ Name = pick(unknown, name, List, must),
+ Type = pick(Name, type, List, set),
+ Rc0 = pick(Name, ram_copies, List, []),
+ Dc = pick(Name, disc_copies, List, []),
+ Doc = pick(Name, disc_only_copies, List, []),
+ Rc = case {Rc0, Dc, Doc} of
+ {[], [], []} -> [node()];
+ _ -> Rc0
+ end,
+ LC = pick(Name, local_content, List, false),
+ RecName = pick(Name, record_name, List, Name),
+ Attrs = pick(Name, attributes, List, [key, val]),
+ Snmp = pick(Name, snmp, List, []),
+ LoadOrder = pick(Name, load_order, List, 0),
+ AccessMode = pick(Name, access_mode, List, read_write),
+ UserProps = pick(Name, user_properties, List, []),
+ verify({alt, [nil, list]}, mnesia_lib:etype(UserProps),
+ {bad_type, Name, {user_properties, UserProps}}),
+ Cookie = pick(Name, cookie, List, ?unique_cookie),
+ Version = pick(Name, version, List, {{2, 0}, []}),
+ Ix = pick(Name, index, List, []),
+ verify({alt, [nil, list]}, mnesia_lib:etype(Ix),
+ {bad_type, Name, {index, [Ix]}}),
+ Ix2 = [attr_to_pos(I, Attrs) || I <- Ix],
+
+ Frag = pick(Name, frag_properties, List, []),
+ verify({alt, [nil, list]}, mnesia_lib:etype(Frag),
+ {badarg, Name, {frag_properties, Frag}}),
+
+ Keys = check_keys(Name, List, record_info(fields, cstruct)),
+ check_duplicates(Name, Keys),
+ #cstruct{name = Name,
+ ram_copies = Rc,
+ disc_copies = Dc,
+ disc_only_copies = Doc,
+ type = Type,
+ index = Ix2,
+ snmp = Snmp,
+ load_order = LoadOrder,
+ access_mode = AccessMode,
+ local_content = LC,
+ record_name = RecName,
+ attributes = Attrs,
+ user_properties = lists:sort(UserProps),
+ frag_properties = lists:sort(Frag),
+ cookie = Cookie,
+ version = Version};
+list2cs(Other) ->
+ mnesia:abort({badarg, Other}).
+
+pick(Tab, Key, List, Default) ->
+ case lists:keysearch(Key, 1, List) of
+ false when Default == must ->
+ mnesia:abort({badarg, Tab, "Missing key", Key, List});
+ false ->
+ Default;
+ {value, {Key, Value}} ->
+ Value;
+ {value, BadArg} ->
+ mnesia:abort({bad_type, Tab, BadArg})
+ end.
+
+%% Convert attribute name to integer if necessary
+attr_tab_to_pos(_Tab, Pos) when integer(Pos) ->
+ Pos;
+attr_tab_to_pos(Tab, Attr) ->
+ attr_to_pos(Attr, val({Tab, attributes})).
+
+%% Convert attribute name to integer if necessary
+attr_to_pos(Pos, _Attrs) when integer(Pos) ->
+ Pos;
+attr_to_pos(Attr, Attrs) when atom(Attr) ->
+ attr_to_pos(Attr, Attrs, 2);
+attr_to_pos(Attr, _) ->
+ mnesia:abort({bad_type, Attr}).
+
+attr_to_pos(Attr, [Attr | _Attrs], Pos) ->
+ Pos;
+attr_to_pos(Attr, [_ | Attrs], Pos) ->
+ attr_to_pos(Attr, Attrs, Pos + 1);
+attr_to_pos(Attr, _, _) ->
+ mnesia:abort({bad_type, Attr}).
+
+check_keys(Tab, [{Key, _Val} | Tail], Items) ->
+ case lists:member(Key, Items) of
+ true -> [Key | check_keys(Tab, Tail, Items)];
+ false -> mnesia:abort({badarg, Tab, Key})
+ end;
+check_keys(_, [], _) ->
+ [];
+check_keys(Tab, Arg, _) ->
+ mnesia:abort({badarg, Tab, Arg}).
+
+check_duplicates(Tab, Keys) ->
+ case has_duplicates(Keys) of
+ false -> ok;
+ true -> mnesia:abort({badarg, Tab, "Duplicate keys", Keys})
+ end.
+
+has_duplicates([H | T]) ->
+ case lists:member(H, T) of
+ true -> true;
+ false -> has_duplicates(T)
+ end;
+has_duplicates([]) ->
+ false.
+
+%% This is the only place where we check the validity of data
+verify_cstruct(Cs) when record(Cs, cstruct) ->
+ verify_nodes(Cs),
+
+ Tab = Cs#cstruct.name,
+ verify(atom, mnesia_lib:etype(Tab), {bad_type, Tab}),
+            %% Verify that the ram table has not been dumped to disc
+ verify(true, lists:member(Type, [set, bag, ordered_set]),
+ {bad_type, Tab, {type, Type}}),
+
+    %% Currently ordered_set is not supported for disc_only_copies.
+ if
+ Type == ordered_set, Cs#cstruct.disc_only_copies /= [] ->
+ mnesia:abort({bad_type, Tab, {not_supported, Type, disc_only_copies}});
+ true ->
+ ok
+ end,
+
+ RecName = Cs#cstruct.record_name,
+ verify(atom, mnesia_lib:etype(RecName),
+ {bad_type, Tab, {record_name, RecName}}),
+
+ Attrs = Cs#cstruct.attributes,
+ verify(list, mnesia_lib:etype(Attrs),
+ {bad_type, Tab, {attributes, Attrs}}),
+
+ Arity = length(Attrs) + 1,
+ verify(true, Arity > 2, {bad_type, Tab, {attributes, Attrs}}),
+
+ lists:foldl(fun(Attr,_Other) when Attr == snmp ->
+ mnesia:abort({bad_type, Tab, {attributes, [Attr]}});
+ (Attr,Other) ->
+ verify(atom, mnesia_lib:etype(Attr),
+ {bad_type, Tab, {attributes, [Attr]}}),
+ verify(false, lists:member(Attr, Other),
+ {combine_error, Tab, {attributes, [Attr | Other]}}),
+ [Attr | Other]
+ end,
+ [],
+ Attrs),
+
+ Index = Cs#cstruct.index,
+ verify({alt, [nil, list]}, mnesia_lib:etype(Index),
+ {bad_type, Tab, {index, Index}}),
+
+ IxFun =
+ fun(Pos) ->
+ verify(true, fun() ->
+ if
+ integer(Pos),
+ Pos > 2,
+ Pos =< Arity ->
+ true;
+ true -> false
+ end
+ end,
+ {bad_type, Tab, {index, [Pos]}})
+ end,
+ lists:foreach(IxFun, Index),
+
+ LC = Cs#cstruct.local_content,
+ verify({alt, [true, false]}, LC,
+ {bad_type, Tab, {local_content, LC}}),
+ Access = Cs#cstruct.access_mode,
+ verify({alt, [read_write, read_only]}, Access,
+ {bad_type, Tab, {access_mode, Access}}),
+
+ Snmp = Cs#cstruct.snmp,
+ verify(true, mnesia_snmp_hook:check_ustruct(Snmp),
+ {badarg, Tab, {snmp, Snmp}}),
+
+ CheckProp = fun(Prop) when tuple(Prop), size(Prop) >= 1 -> ok;
+ (Prop) -> mnesia:abort({bad_type, Tab, {user_properties, [Prop]}})
+ end,
+ lists:foreach(CheckProp, Cs#cstruct.user_properties),
+
+ case Cs#cstruct.cookie of
+ {{MegaSecs, Secs, MicroSecs}, _Node}
+ when integer(MegaSecs), integer(Secs),
+ integer(MicroSecs), atom(node) ->
+ ok;
+ Cookie ->
+ mnesia:abort({bad_type, Tab, {cookie, Cookie}})
+ end,
+ case Cs#cstruct.version of
+ {{Major, Minor}, _Detail}
+ when integer(Major), integer(Minor) ->
+ ok;
+ Version ->
+ mnesia:abort({bad_type, Tab, {version, Version}})
+ end.
+
+verify_nodes(Cs) ->
+ Tab = Cs#cstruct.name,
+ Ram = Cs#cstruct.ram_copies,
+ Disc = Cs#cstruct.disc_copies,
+ DiscOnly = Cs#cstruct.disc_only_copies,
+ LoadOrder = Cs#cstruct.load_order,
+
+ verify({alt, [nil, list]}, mnesia_lib:etype(Ram),
+ {bad_type, Tab, {ram_copies, Ram}}),
+ verify({alt, [nil, list]}, mnesia_lib:etype(Disc),
+ {bad_type, Tab, {disc_copies, Disc}}),
+ case Tab of
+ schema ->
+ verify([], DiscOnly, {bad_type, Tab, {disc_only_copies, DiscOnly}});
+ _ ->
+ verify({alt, [nil, list]},
+ mnesia_lib:etype(DiscOnly),
+ {bad_type, Tab, {disc_only_copies, DiscOnly}})
+ end,
+ verify(integer, mnesia_lib:etype(LoadOrder),
+ {bad_type, Tab, {load_order, LoadOrder}}),
+
+ Nodes = Ram ++ Disc ++ DiscOnly,
+ verify(list, mnesia_lib:etype(Nodes),
+ {combine_error, Tab,
+ [{ram_copies, []}, {disc_copies, []}, {disc_only_copies, []}]}),
+ verify(false, has_duplicates(Nodes), {combine_error, Tab, Nodes}),
+ AtomCheck = fun(N) -> verify(atom, mnesia_lib:etype(N), {bad_type, Tab, N}) end,
+ lists:foreach(AtomCheck, Nodes).
+
+verify(Expected, Fun, Error) when function(Fun) ->
+ do_verify(Expected, catch Fun(), Error);
+verify(Expected, Actual, Error) ->
+ do_verify(Expected, Actual, Error).
+
+do_verify({alt, Values}, Value, Error) ->
+ case lists:member(Value, Values) of
+ true -> ok;
+ false -> mnesia:abort(Error)
+ end;
+do_verify(Value, Value, _) ->
+ ok;
+do_verify(_Value, _, Error) ->
+ mnesia:abort(Error).
+
+ensure_writable(Tab) ->
+ case val({Tab, where_to_write}) of
+ [] -> mnesia:abort({read_only, Tab});
+ _ -> ok
+ end.
+
+%% Ensure that all replicas on diskfull nodes are active
+ensure_active(Cs) ->
+ ensure_active(Cs, active_replicas).
+
+ensure_active(Cs, What) ->
+ Tab = Cs#cstruct.name,
+ case val({Tab, What}) of
+ [] -> mnesia:abort({no_exists, Tab});
+ _ -> ok
+ end,
+ Nodes = mnesia_lib:intersect(val({schema, disc_copies}),
+ mnesia_lib:cs_to_nodes(Cs)),
+ W = {Tab, What},
+ case Nodes -- val(W) of
+ [] ->
+ ok;
+ Ns ->
+ Expl = "All replicas on diskfull nodes are not active yet",
+ case val({Tab, local_content}) of
+ true ->
+ case rpc:multicall(Ns, ?MODULE, is_remote_member, [W]) of
+ {Replies, []} ->
+ check_active(Replies, Expl, Tab);
+ {_Replies, BadNs} ->
+ mnesia:abort({not_active, Expl, Tab, BadNs})
+ end;
+ false ->
+ mnesia:abort({not_active, Expl, Tab, Ns})
+ end
+ end.
+
+ensure_not_active(schema, Node) ->
+ case lists:member(Node, val({schema, active_replicas})) of
+ false ->
+ ok;
+ true ->
+ Expl = "Mnesia is running",
+ mnesia:abort({active, Expl, Node})
+ end.
+
+is_remote_member(Key) ->
+ IsActive = lists:member(node(), val(Key)),
+ {IsActive, node()}.
+
+check_active([{true, _Node} | Replies], Expl, Tab) ->
+ check_active(Replies, Expl, Tab);
+check_active([{false, Node} | _Replies], Expl, Tab) ->
+ mnesia:abort({not_active, Expl, Tab, [Node]});
+check_active([{badrpc, Reason} | _Replies], Expl, Tab) ->
+ mnesia:abort({not_active, Expl, Tab, Reason});
+check_active([], _Expl, _Tab) ->
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Here's the real interface function to create a table
+
+create_table(TabDef) ->
+ schema_transaction(fun() -> do_multi_create_table(TabDef) end).
+
+%% And the corresponding do routines ....
+
+do_multi_create_table(TabDef) ->
+ get_tid_ts_and_lock(schema, write),
+ ensure_writable(schema),
+ Cs = list2cs(TabDef),
+ case Cs#cstruct.frag_properties of
+ [] ->
+ do_create_table(Cs);
+ _Props ->
+ CsList = mnesia_frag:expand_cstruct(Cs),
+ lists:foreach(fun do_create_table/1, CsList)
+ end,
+ ok.
+
+do_create_table(Cs) ->
+ {_Mod, _Tid, Ts} = get_tid_ts_and_lock(schema, none),
+ Store = Ts#tidstore.store,
+ do_insert_schema_ops(Store, make_create_table(Cs)).
+
+make_create_table(Cs) ->
+ Tab = Cs#cstruct.name,
+ verify('EXIT', element(1, ?catch_val({Tab, cstruct})),
+ {already_exists, Tab}),
+ unsafe_make_create_table(Cs).
+
+% unsafe_do_create_table(Cs) ->
+% {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, none),
+% Store = Ts#tidstore.store,
+% do_insert_schema_ops(Store, unsafe_make_create_table(Cs)).
+
+unsafe_make_create_table(Cs) ->
+ {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, none),
+ verify_cstruct(Cs),
+ Tab = Cs#cstruct.name,
+
+ %% Check that we have all disc replica nodes running
+ DiscNodes = Cs#cstruct.disc_copies ++ Cs#cstruct.disc_only_copies,
+ RunningNodes = val({current, db_nodes}),
+ CheckDisc = fun(N) ->
+ verify(true, lists:member(N, RunningNodes),
+ {not_active, Tab, N})
+ end,
+ lists:foreach(CheckDisc, DiscNodes),
+
+ Nodes = mnesia_lib:intersect(mnesia_lib:cs_to_nodes(Cs), RunningNodes),
+ Store = Ts#tidstore.store,
+ mnesia_locker:wlock_no_exist(Tid, Store, Tab, Nodes),
+ [{op, create_table, cs2list(Cs)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete a table entirely on all nodes.
+
+delete_table(Tab) ->
+ schema_transaction(fun() -> do_delete_table(Tab) end).
+
+do_delete_table(schema) ->
+ mnesia:abort({bad_type, schema});
+do_delete_table(Tab) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ ensure_writable(schema),
+ insert_schema_ops(TidTs, make_delete_table(Tab, whole_table)).
+
+make_delete_table(Tab, Mode) ->
+ case Mode of
+ whole_table ->
+ case val({Tab, frag_properties}) of
+ [] ->
+ [make_delete_table2(Tab)];
+ _Props ->
+ %% Check if it is a base table
+ mnesia_frag:lookup_frag_hash(Tab),
+
+ %% Check for foreigners
+ F = mnesia_frag:lookup_foreigners(Tab),
+ verify([], F, {combine_error, Tab, "Too many foreigners", F}),
+ [make_delete_table2(T) || T <- mnesia_frag:frag_names(Tab)]
+ end;
+ single_frag ->
+ [make_delete_table2(Tab)]
+ end.
+
+make_delete_table2(Tab) ->
+ get_tid_ts_and_lock(Tab, write),
+ Cs = val({Tab, cstruct}),
+ ensure_active(Cs),
+ ensure_writable(Tab),
+ {op, delete_table, cs2list(Cs)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Change fragmentation of a table
+
+change_table_frag(Tab, Change) ->
+ schema_transaction(fun() -> do_change_table_frag(Tab, Change) end).
+
+do_change_table_frag(Tab, Change) when atom(Tab), Tab /= schema ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ Ops = mnesia_frag:change_table_frag(Tab, Change),
+ [insert_schema_ops(TidTs, Op) || Op <- Ops],
+ ok;
+do_change_table_frag(Tab, _Change) ->
+ mnesia:abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Clear a table
+
+clear_table(Tab) ->
+ schema_transaction(fun() -> do_clear_table(Tab) end).
+
+do_clear_table(schema) ->
+ mnesia:abort({bad_type, schema});
+do_clear_table(Tab) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, write),
+ insert_schema_ops(TidTs, make_clear_table(Tab)).
+
+make_clear_table(Tab) ->
+ ensure_writable(schema),
+ Cs = val({Tab, cstruct}),
+ ensure_active(Cs),
+ ensure_writable(Tab),
+ [{op, clear_table, cs2list(Cs)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_table_copy(Tab, Node, Storage) ->
+ schema_transaction(fun() -> do_add_table_copy(Tab, Node, Storage) end).
+
+do_add_table_copy(Tab, Node, Storage) when atom(Tab), atom(Node) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ insert_schema_ops(TidTs, make_add_table_copy(Tab, Node, Storage));
+do_add_table_copy(Tab,Node,_) ->
+ mnesia:abort({badarg, Tab, Node}).
+
+make_add_table_copy(Tab, Node, Storage) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ Ns = mnesia_lib:cs_to_nodes(Cs),
+ verify(false, lists:member(Node, Ns), {already_exists, Tab, Node}),
+ Cs2 = new_cs(Cs, Node, Storage, add),
+ verify_cstruct(Cs2),
+
+ %% Check storage and if node is running
+ IsRunning = lists:member(Node, val({current, db_nodes})),
+ if
+ Storage == unknown ->
+ mnesia:abort({badarg, Tab, Storage});
+ Tab == schema ->
+ if
+ Storage /= ram_copies ->
+ mnesia:abort({badarg, Tab, Storage});
+ IsRunning == true ->
+ mnesia:abort({already_exists, Tab, Node});
+ true ->
+ ignore
+ end;
+ Storage == ram_copies ->
+ ignore;
+ IsRunning == true ->
+ ignore;
+ IsRunning == false ->
+ mnesia:abort({not_active, schema, Node})
+ end,
+ [{op, add_table_copy, Storage, Node, cs2list(Cs2)}].
+
+del_table_copy(Tab, Node) ->
+ schema_transaction(fun() -> do_del_table_copy(Tab, Node) end).
+
+do_del_table_copy(Tab, Node) when atom(Node) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+%% get_tid_ts_and_lock(Tab, write),
+ insert_schema_ops(TidTs, make_del_table_copy(Tab, Node));
+do_del_table_copy(Tab, Node) ->
+ mnesia:abort({badarg, Tab, Node}).
+
+make_del_table_copy(Tab, Node) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ Storage = mnesia_lib:schema_cs_to_storage_type(Node, Cs),
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ case mnesia_lib:cs_to_nodes(Cs2) of
+ [] when Tab == schema ->
+ mnesia:abort({combine_error, Tab, "Last replica"});
+ [] ->
+ ensure_active(Cs),
+ dbg_out("Last replica deleted in table ~p~n", [Tab]),
+ make_delete_table(Tab, whole_table);
+ _ when Tab == schema ->
+ ensure_active(Cs2),
+ ensure_not_active(Tab, Node),
+ verify_cstruct(Cs2),
+ Ops = remove_node_from_tabs(val({schema, tables}), Node),
+ [{op, del_table_copy, ram_copies, Node, cs2list(Cs2)} | Ops];
+ _ ->
+ ensure_active(Cs),
+ verify_cstruct(Cs2),
+ [{op, del_table_copy, Storage, Node, cs2list(Cs2)}]
+ end.
+
+remove_node_from_tabs([], _Node) ->
+ [];
+remove_node_from_tabs([schema|Rest], Node) ->
+ remove_node_from_tabs(Rest, Node);
+remove_node_from_tabs([Tab|Rest], Node) ->
+ {Cs, IsFragModified} =
+ mnesia_frag:remove_node(Node, incr_version(val({Tab, cstruct}))),
+ case mnesia_lib:schema_cs_to_storage_type(Node, Cs) of
+ unknown ->
+ case IsFragModified of
+ true ->
+ [{op, change_table_frag, {del_node, Node}, cs2list(Cs)} |
+ remove_node_from_tabs(Rest, Node)];
+ false ->
+ remove_node_from_tabs(Rest, Node)
+ end;
+ Storage ->
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ case mnesia_lib:cs_to_nodes(Cs2) of
+ [] ->
+ [{op, delete_table, cs2list(Cs)} |
+ remove_node_from_tabs(Rest, Node)];
+ _Ns ->
+ verify_cstruct(Cs2),
+ [{op, del_table_copy, ram_copies, Node, cs2list(Cs2)}|
+ remove_node_from_tabs(Rest, Node)]
+ end
+ end.
+
+new_cs(Cs, Node, ram_copies, add) ->
+ Cs#cstruct{ram_copies = opt_add(Node, Cs#cstruct.ram_copies)};
+new_cs(Cs, Node, disc_copies, add) ->
+ Cs#cstruct{disc_copies = opt_add(Node, Cs#cstruct.disc_copies)};
+new_cs(Cs, Node, disc_only_copies, add) ->
+ Cs#cstruct{disc_only_copies = opt_add(Node, Cs#cstruct.disc_only_copies)};
+new_cs(Cs, Node, ram_copies, del) ->
+ Cs#cstruct{ram_copies = lists:delete(Node , Cs#cstruct.ram_copies)};
+new_cs(Cs, Node, disc_copies, del) ->
+ Cs#cstruct{disc_copies = lists:delete(Node , Cs#cstruct.disc_copies)};
+new_cs(Cs, Node, disc_only_copies, del) ->
+ Cs#cstruct{disc_only_copies =
+ lists:delete(Node , Cs#cstruct.disc_only_copies)};
+new_cs(Cs, _Node, Storage, _Op) ->
+ mnesia:abort({badarg, Cs#cstruct.name, Storage}).
+
+
+opt_add(N, L) -> [N | lists:delete(N, L)].
+
+move_table(Tab, FromNode, ToNode) ->
+ schema_transaction(fun() -> do_move_table(Tab, FromNode, ToNode) end).
+
+do_move_table(schema, _FromNode, _ToNode) ->
+ mnesia:abort({bad_type, schema});
+do_move_table(Tab, FromNode, ToNode) when atom(FromNode), atom(ToNode) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ insert_schema_ops(TidTs, make_move_table(Tab, FromNode, ToNode));
+do_move_table(Tab, FromNode, ToNode) ->
+ mnesia:abort({badarg, Tab, FromNode, ToNode}).
+
+make_move_table(Tab, FromNode, ToNode) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ Ns = mnesia_lib:cs_to_nodes(Cs),
+ verify(false, lists:member(ToNode, Ns), {already_exists, Tab, ToNode}),
+ verify(true, lists:member(FromNode, val({Tab, where_to_write})),
+ {not_active, Tab, FromNode}),
+ verify(false, val({Tab,local_content}),
+ {"Cannot move table with local content", Tab}),
+ ensure_active(Cs),
+ Running = val({current, db_nodes}),
+ Storage = mnesia_lib:schema_cs_to_storage_type(FromNode, Cs),
+ verify(true, lists:member(ToNode, Running), {not_active, schema, ToNode}),
+
+ Cs2 = new_cs(Cs, ToNode, Storage, add),
+ Cs3 = new_cs(Cs2, FromNode, Storage, del),
+ verify_cstruct(Cs3),
+ [{op, add_table_copy, Storage, ToNode, cs2list(Cs2)},
+ {op, sync_trans},
+ {op, del_table_copy, Storage, FromNode, cs2list(Cs3)}].
+
+%% end of functions to add and delete table copies on nodes
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+
+change_table_copy_type(Tab, Node, ToS) ->
+ schema_transaction(fun() -> do_change_table_copy_type(Tab, Node, ToS) end).
+
+do_change_table_copy_type(Tab, Node, ToS) when atom(Node) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, write), % ensure global sync
+ %% get_tid_ts_and_lock(Tab, read),
+ insert_schema_ops(TidTs, make_change_table_copy_type(Tab, Node, ToS));
+do_change_table_copy_type(Tab, Node, _ToS) ->
+ mnesia:abort({badarg, Tab, Node}).
+
+make_change_table_copy_type(Tab, Node, unknown) ->
+ make_del_table_copy(Tab, Node);
+make_change_table_copy_type(Tab, Node, ToS) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ FromS = mnesia_lib:storage_type_at_node(Node, Tab),
+
+ case compare_storage_type(false, FromS, ToS) of
+ {same, _} ->
+ mnesia:abort({already_exists, Tab, Node, ToS});
+ {diff, _} ->
+ ignore;
+ incompatible ->
+ ensure_active(Cs)
+ end,
+
+ Cs2 = new_cs(Cs, Node, FromS, del),
+ Cs3 = new_cs(Cs2, Node, ToS, add),
+ verify_cstruct(Cs3),
+
+ if
+ FromS == unknown ->
+ make_add_table_copy(Tab, Node, ToS);
+ true ->
+ ignore
+ end,
+
+ [{op, change_table_copy_type, Node, FromS, ToS, cs2list(Cs3)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% change index functions ....
+%% Pos is already incremented by 1 in both of these functions
+
+add_table_index(Tab, Pos) ->
+ schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
+
+do_add_table_index(schema, _Attr) ->
+ mnesia:abort({bad_type, schema});
+do_add_table_index(Tab, Attr) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ Pos = attr_tab_to_pos(Tab, Attr),
+ insert_schema_ops(TidTs, make_add_table_index(Tab, Pos)).
+
+make_add_table_index(Tab, Pos) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ Ix = Cs#cstruct.index,
+ verify(false, lists:member(Pos, Ix), {already_exists, Tab, Pos}),
+ Ix2 = lists:sort([Pos | Ix]),
+ Cs2 = Cs#cstruct{index = Ix2},
+ verify_cstruct(Cs2),
+ [{op, add_index, Pos, cs2list(Cs2)}].
+
+del_table_index(Tab, Pos) ->
+ schema_transaction(fun() -> do_del_table_index(Tab, Pos) end).
+
+do_del_table_index(schema, _Attr) ->
+ mnesia:abort({bad_type, schema});
+do_del_table_index(Tab, Attr) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ Pos = attr_tab_to_pos(Tab, Attr),
+ insert_schema_ops(TidTs, make_del_table_index(Tab, Pos)).
+
+make_del_table_index(Tab, Pos) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ Ix = Cs#cstruct.index,
+ verify(true, lists:member(Pos, Ix), {no_exists, Tab, Pos}),
+ Cs2 = Cs#cstruct{index = lists:delete(Pos, Ix)},
+ verify_cstruct(Cs2),
+ [{op, del_index, Pos, cs2list(Cs2)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_snmp(Tab, Ustruct) ->
+ schema_transaction(fun() -> do_add_snmp(Tab, Ustruct) end).
+
+do_add_snmp(schema, _Ustruct) ->
+ mnesia:abort({bad_type, schema});
+do_add_snmp(Tab, Ustruct) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ insert_schema_ops(TidTs, make_add_snmp(Tab, Ustruct)).
+
+make_add_snmp(Tab, Ustruct) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ verify([], Cs#cstruct.snmp, {already_exists, Tab, snmp}),
+ Error = {badarg, Tab, snmp, Ustruct},
+ verify(true, mnesia_snmp_hook:check_ustruct(Ustruct), Error),
+ Cs2 = Cs#cstruct{snmp = Ustruct},
+ verify_cstruct(Cs2),
+ [{op, add_snmp, Ustruct, cs2list(Cs2)}].
+
+del_snmp(Tab) ->
+ schema_transaction(fun() -> do_del_snmp(Tab) end).
+
+do_del_snmp(schema) ->
+ mnesia:abort({bad_type, schema});
+do_del_snmp(Tab) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ insert_schema_ops(TidTs, make_del_snmp(Tab)).
+
+make_del_snmp(Tab) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ Cs2 = Cs#cstruct{snmp = []},
+ verify_cstruct(Cs2),
+ [{op, del_snmp, cs2list(Cs2)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+
+transform_table(Tab, Fun, NewAttrs, NewRecName)
+ when function(Fun), list(NewAttrs), atom(NewRecName) ->
+ schema_transaction(fun() -> do_transform_table(Tab, Fun, NewAttrs, NewRecName) end);
+
+transform_table(Tab, ignore, NewAttrs, NewRecName)
+ when list(NewAttrs), atom(NewRecName) ->
+ schema_transaction(fun() -> do_transform_table(Tab, ignore, NewAttrs, NewRecName) end);
+
+transform_table(Tab, Fun, NewAttrs, NewRecName) ->
+ {aborted,{bad_type, Tab, Fun, NewAttrs, NewRecName}}.
+
+do_transform_table(schema, _Fun, _NewAttrs, _NewRecName) ->
+ mnesia:abort({bad_type, schema});
+do_transform_table(Tab, Fun, NewAttrs, NewRecName) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, write),
+ insert_schema_ops(TidTs, make_transform(Tab, Fun, NewAttrs, NewRecName)).
+
+make_transform(Tab, Fun, NewAttrs, NewRecName) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ ensure_writable(Tab),
+ case mnesia_lib:val({Tab, index}) of
+ [] ->
+ Cs2 = Cs#cstruct{attributes = NewAttrs, record_name = NewRecName},
+ verify_cstruct(Cs2),
+ [{op, transform, Fun, cs2list(Cs2)}];
+ PosList ->
+ DelIdx = fun(Pos, Ncs) ->
+ Ix = Ncs#cstruct.index,
+ Ncs1 = Ncs#cstruct{index = lists:delete(Pos, Ix)},
+ Op = {op, del_index, Pos, cs2list(Ncs1)},
+ {Op, Ncs1}
+ end,
+ AddIdx = fun(Pos, Ncs) ->
+ Ix = Ncs#cstruct.index,
+ Ix2 = lists:sort([Pos | Ix]),
+ Ncs1 = Ncs#cstruct{index = Ix2},
+ Op = {op, add_index, Pos, cs2list(Ncs1)},
+ {Op, Ncs1}
+ end,
+ {DelOps, Cs1} = lists:mapfoldl(DelIdx, Cs, PosList),
+ Cs2 = Cs1#cstruct{attributes = NewAttrs, record_name = NewRecName},
+ {AddOps, Cs3} = lists:mapfoldl(AddIdx, Cs2, PosList),
+ verify_cstruct(Cs3),
+ lists:flatten([DelOps, {op, transform, Fun, cs2list(Cs2)}, AddOps])
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+
+change_table_access_mode(Tab, Mode) ->
+ schema_transaction(fun() -> do_change_table_access_mode(Tab, Mode) end).
+
+do_change_table_access_mode(Tab, Mode) ->
+ {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, write),
+ Store = Ts#tidstore.store,
+ mnesia_locker:wlock_no_exist(Tid, Store, schema, val({schema, active_replicas})),
+ mnesia_locker:wlock_no_exist(Tid, Store, Tab, val({Tab, active_replicas})),
+ do_insert_schema_ops(Store, make_change_table_access_mode(Tab, Mode)).
+
+make_change_table_access_mode(Tab, Mode) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ OldMode = Cs#cstruct.access_mode,
+ verify(false, OldMode == Mode, {already_exists, Tab, Mode}),
+ Cs2 = Cs#cstruct{access_mode = Mode},
+ verify_cstruct(Cs2),
+ [{op, change_table_access_mode, cs2list(Cs2), OldMode, Mode}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+change_table_load_order(Tab, LoadOrder) ->
+ schema_transaction(fun() -> do_change_table_load_order(Tab, LoadOrder) end).
+
+do_change_table_load_order(schema, _LoadOrder) ->
+ mnesia:abort({bad_type, schema});
+do_change_table_load_order(Tab, LoadOrder) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, none),
+ insert_schema_ops(TidTs, make_change_table_load_order(Tab, LoadOrder)).
+
+make_change_table_load_order(Tab, LoadOrder) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ OldLoadOrder = Cs#cstruct.load_order,
+ Cs2 = Cs#cstruct{load_order = LoadOrder},
+ verify_cstruct(Cs2),
+ [{op, change_table_load_order, cs2list(Cs2), OldLoadOrder, LoadOrder}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+write_table_property(Tab, Prop) when tuple(Prop), size(Prop) >= 1 ->
+ schema_transaction(fun() -> do_write_table_property(Tab, Prop) end);
+write_table_property(Tab, Prop) ->
+ {aborted, {bad_type, Tab, Prop}}.
+do_write_table_property(Tab, Prop) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ {_, _, Ts} = TidTs,
+ Store = Ts#tidstore.store,
+ case change_prop_in_existing_op(Tab, Prop, write_property, Store) of
+ true ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,write_property,Store) -> true~n",
+ [Tab,Prop]),
+ %% we have merged the table prop into the create_table op
+ ok;
+ false ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,write_property,Store) -> false~n",
+ [Tab,Prop]),
+ %% this must be an existing table
+ get_tid_ts_and_lock(Tab, none),
+ insert_schema_ops(TidTs, make_write_table_properties(Tab, [Prop]))
+ end.
+
+make_write_table_properties(Tab, Props) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ make_write_table_properties(Tab, Props, Cs).
+
+make_write_table_properties(Tab, [Prop | Props], Cs) ->
+ OldProps = Cs#cstruct.user_properties,
+ PropKey = element(1, Prop),
+ DelProps = lists:keydelete(PropKey, 1, OldProps),
+ MergedProps = lists:merge(DelProps, [Prop]),
+ Cs2 = Cs#cstruct{user_properties = MergedProps},
+ verify_cstruct(Cs2),
+ [{op, write_property, cs2list(Cs2), Prop} |
+ make_write_table_properties(Tab, Props, Cs2)];
+make_write_table_properties(_Tab, [], _Cs) ->
+ [].
+
+change_prop_in_existing_op(Tab, Prop, How, Store) ->
+ Ops = ets:match_object(Store, '_'),
+ case update_existing_op(Ops, Tab, Prop, How, []) of
+ {true, Ops1} ->
+ ets:match_delete(Store, '_'),
+ [ets:insert(Store, Op) || Op <- Ops1],
+ true;
+ false ->
+ false
+ end.
+
+update_existing_op([{op, Op, L = [{name,Tab}|_], _OldProp}|Ops],
+ Tab, Prop, How, Acc) when Op == write_property;
+ Op == delete_property ->
+ %% Apparently, mnesia_dumper doesn't care about OldProp here -- just L,
+    %% so we will throw away OldProp (not that it matters...) and insert Prop
+    %% as element 3.
+ L1 = insert_prop(Prop, L, How),
+ NewOp = {op, How, L1, Prop},
+ {true, lists:reverse(Acc) ++ [NewOp|Ops]};
+update_existing_op([Op = {op, create_table, L}|Ops], Tab, Prop, How, Acc) ->
+ case lists:keysearch(name, 1, L) of
+ {value, {_, Tab}} ->
+ %% Tab is being created here -- insert Prop into L
+ L1 = insert_prop(Prop, L, How),
+ {true, lists:reverse(Acc) ++ [{op, create_table, L1}|Ops]};
+ _ ->
+ update_existing_op(Ops, Tab, Prop, How, [Op|Acc])
+ end;
+update_existing_op([Op|Ops], Tab, Prop, How, Acc) ->
+ update_existing_op(Ops, Tab, Prop, How, [Op|Acc]);
+update_existing_op([], _, _, _, _) ->
+ false.
+
+%% perhaps a misnomer. How could also be delete_property... never mind.
+%% Returns the modified L.
+insert_prop(Prop, L, How) ->
+ Prev = find_props(L),
+ MergedProps = merge_with_previous(How, Prop, Prev),
+ replace_props(L, MergedProps).
+
+
+find_props([{user_properties, P}|_]) -> P;
+find_props([_H|T]) -> find_props(T).
+%% we shouldn't reach []
+
+replace_props([{user_properties, _}|T], P) -> [{user_properties, P}|T];
+replace_props([H|T], P) -> [H|replace_props(T, P)].
+%% again, we shouldn't reach []
+
+merge_with_previous(write_property, Prop, Prev) ->
+ Key = element(1, Prop),
+ Prev1 = lists:keydelete(Key, 1, Prev),
+ lists:sort([Prop|Prev1]);
+merge_with_previous(delete_property, PropKey, Prev) ->
+ lists:keydelete(PropKey, 1, Prev).
+
+delete_table_property(Tab, PropKey) ->
+ schema_transaction(fun() -> do_delete_table_property(Tab, PropKey) end).
+
+do_delete_table_property(Tab, PropKey) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ {_, _, Ts} = TidTs,
+ Store = Ts#tidstore.store,
+ case change_prop_in_existing_op(Tab, PropKey, delete_property, Store) of
+ true ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,delete_property,Store) -> true~n",
+ [Tab,PropKey]),
+ %% we have merged the table prop into the create_table op
+ ok;
+ false ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,delete_property,Store) -> false~n",
+ [Tab,PropKey]),
+ %% this must be an existing table
+ get_tid_ts_and_lock(Tab, none),
+ insert_schema_ops(TidTs,
+ make_delete_table_properties(Tab, [PropKey]))
+ end.
+
+make_delete_table_properties(Tab, PropKeys) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ make_delete_table_properties(Tab, PropKeys, Cs).
+
+make_delete_table_properties(Tab, [PropKey | PropKeys], Cs) ->
+ OldProps = Cs#cstruct.user_properties,
+ Props = lists:keydelete(PropKey, 1, OldProps),
+ Cs2 = Cs#cstruct{user_properties = Props},
+ verify_cstruct(Cs2),
+ [{op, delete_property, cs2list(Cs2), PropKey} |
+ make_delete_table_properties(Tab, PropKeys, Cs2)];
+make_delete_table_properties(_Tab, [], _Cs) ->
+ [].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Ensure that the transaction can be committed even
+%% if the node crashes and Mnesia is restarted
+prepare_commit(Tid, Commit, WaitFor) ->
+ case Commit#commit.schema_ops of
+ [] ->
+ {false, Commit, optional};
+ OrigOps ->
+ {Modified, Ops, DumperMode} =
+ prepare_ops(Tid, OrigOps, WaitFor, false, [], optional),
+ InitBy = schema_prepare,
+ GoodRes = {Modified,
+ Commit#commit{schema_ops = lists:reverse(Ops)},
+ DumperMode},
+ case DumperMode of
+ optional ->
+ dbg_out("Transaction log dump skipped (~p): ~w~n",
+ [DumperMode, InitBy]);
+ mandatory ->
+ case mnesia_controller:sync_dump_log(InitBy) of
+ dumped ->
+ GoodRes;
+ {error, Reason} ->
+ mnesia:abort(Reason)
+ end
+ end,
+ case Ops of
+ [] ->
+ ignore;
+ _ ->
+                %% We need to grab a dumper lock here; the log may not
+                %% be dumped by others during the schema commit phase.
+ mnesia_controller:wait_for_schema_commit_lock()
+ end,
+ GoodRes
+ end.
+
+prepare_ops(Tid, [Op | Ops], WaitFor, Changed, Acc, DumperMode) ->
+ case prepare_op(Tid, Op, WaitFor) of
+ {true, mandatory} ->
+ prepare_ops(Tid, Ops, WaitFor, Changed, [Op | Acc], mandatory);
+ {true, optional} ->
+ prepare_ops(Tid, Ops, WaitFor, Changed, [Op | Acc], DumperMode);
+ {true, Ops2, mandatory} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Ops2 ++ Acc, mandatory);
+ {true, Ops2, optional} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Ops2 ++ Acc, DumperMode);
+ {false, mandatory} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Acc, mandatory);
+ {false, optional} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Acc, DumperMode)
+ end;
+prepare_ops(_Tid, [], _WaitFor, Changed, Acc, DumperMode) ->
+ {Changed, Acc, DumperMode}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Prepare for commit
+%% Returns {true, DumperMode} if Op should be included unmodified,
+%% {true, NewOps, DumperMode} if NewOps should replace Op, and
+%% {false, DumperMode} if Op should NOT be included.
+%%
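+%% For illustration: an op for a table with no replica on this node
+%% typically yields {true, optional}; a transform op that rewrites records
+%% yields {true, NewOps, mandatory}; and a 'rec' op whose table has unknown
+%% storage yields {false, optional} and is dropped by prepare_ops/6.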
+prepare_op(_Tid, {op, rec, unknown, Rec}, _WaitFor) ->
+ {{Tab, Key}, Items, _Op} = Rec,
+ case val({Tab, storage_type}) of
+ unknown ->
+ {false, optional};
+ Storage ->
+ mnesia_tm:prepare_snmp(Tab, Key, Items), % May exit
+ {true, [{op, rec, Storage, Rec}], optional}
+ end;
+
+prepare_op(_Tid, {op, announce_im_running, _Node, SchemaDef, Running, RemoteRunning}, _WaitFor) ->
+ SchemaCs = list2cs(SchemaDef),
+ case lists:member(node(), Running) of
+ true ->
+ announce_im_running(RemoteRunning -- Running, SchemaCs);
+ false ->
+ announce_im_running(Running -- RemoteRunning, SchemaCs)
+ end,
+ {false, optional};
+
+prepare_op(_Tid, {op, sync_trans}, {part, CoordPid}) ->
+ CoordPid ! {sync_trans, self()},
+ receive
+ {sync_trans, CoordPid} ->
+ {false, optional};
+ Else ->
+ mnesia_lib:verbose("sync_op terminated due to ~p~n", [Else]),
+ mnesia:abort(Else)
+ end;
+
+prepare_op(_Tid, {op, sync_trans}, {coord, Nodes}) ->
+ case receive_sync(Nodes, []) of
+ {abort, Reason} ->
+ mnesia_lib:verbose("sync_op terminated due to ~p~n", [Reason]),
+ mnesia:abort(Reason);
+ Pids ->
+ [Pid ! {sync_trans, self()} || Pid <- Pids],
+ {false, optional}
+ end;
+prepare_op(Tid, {op, create_table, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ UseDir = mnesia_monitor:use_dir(),
+ Tab = Cs#cstruct.name,
+ case Storage of
+ disc_copies when UseDir == false ->
+ UseDirReason = {bad_type, Tab, Storage, node()},
+ mnesia:abort(UseDirReason);
+ disc_only_copies when UseDir == false ->
+ UseDirReason = {bad_type, Tab, Storage, node()},
+ mnesia:abort(UseDirReason);
+ ram_copies ->
+ create_ram_table(Tab, Cs#cstruct.type),
+ insert_cstruct(Tid, Cs, false),
+ {true, optional};
+ disc_copies ->
+ create_ram_table(Tab, Cs#cstruct.type),
+ create_disc_table(Tab),
+ insert_cstruct(Tid, Cs, false),
+ {true, optional};
+ disc_only_copies ->
+ create_disc_only_table(Tab,Cs#cstruct.type),
+ insert_cstruct(Tid, Cs, false),
+ {true, optional};
+ unknown -> %% No replica on this node
+ insert_cstruct(Tid, Cs, false),
+ {true, optional}
+ end;
+
+prepare_op(Tid, {op, add_table_copy, Storage, Node, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+
+ if
+ Tab == schema ->
+ {true, optional}; % Nothing to prepare
+ Node == node() ->
+ case mnesia_lib:val({schema, storage_type}) of
+ ram_copies when Storage /= ram_copies ->
+ Error = {combine_error, Tab, "has no disc", Node},
+ mnesia:abort(Error);
+ _ ->
+ ok
+ end,
+            %% Tables are created by the get_network code in mnesia_loader
+ insert_cstruct(Tid, Cs, true),
+ case mnesia_controller:get_network_copy(Tab, Cs) of
+ {loaded, ok} ->
+ {true, optional};
+ {not_loaded, ErrReason} ->
+ Reason = {system_limit, Tab, {Node, ErrReason}},
+ mnesia:abort(Reason)
+ end;
+ Node /= node() ->
+            %% Verify that the ram table has not been dumped to disc
+ if
+ Storage /= ram_copies ->
+ case mnesia_lib:schema_cs_to_storage_type(node(), Cs) of
+ ram_copies ->
+ Dat = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dat) of
+ true ->
+ mnesia:abort({combine_error, Tab, Storage,
+ "Table dumped to disc", node()});
+ false ->
+ ok
+ end;
+ _ ->
+ ok
+ end;
+ true ->
+ ok
+ end,
+ insert_cstruct(Tid, Cs, true),
+ {true, optional}
+ end;
+
+prepare_op(Tid, {op, del_table_copy, _Storage, Node, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+
+ if
+ %% Schema table lock is always required to run a schema op.
+        %% No need to lock it.
+ node(Tid#tid.pid) == node(), Tab /= schema ->
+ Pid = spawn_link(?MODULE, lock_del_table, [Tab, Node, Cs, self()]),
+ receive
+ {Pid, updated} ->
+ {true, optional};
+ {Pid, FailReason} ->
+ mnesia:abort(FailReason);
+ {'EXIT', Pid, Reason} ->
+ mnesia:abort(Reason)
+ end;
+ true ->
+ {true, optional}
+ end;
+
+prepare_op(_Tid, {op, change_table_copy_type, N, FromS, ToS, TabDef}, _WaitFor)
+ when N == node() ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+
+ NotActive = mnesia_lib:not_active_here(Tab),
+
+ if
+ NotActive == true ->
+ mnesia:abort({not_active, Tab, node()});
+
+ Tab == schema ->
+ case {FromS, ToS} of
+ {ram_copies, disc_copies} ->
+ case mnesia:system_info(schema_location) of
+ opt_disc ->
+ ignore;
+ _ ->
+ mnesia:abort({combine_error, Tab, node(),
+ "schema_location must be opt_disc"})
+ end,
+ Dir = mnesia_lib:dir(),
+ case opt_create_dir(true, Dir) of
+ ok ->
+ purge_dir(Dir, []),
+ mnesia_log:purge_all_logs(),
+ set(use_dir, true),
+ mnesia_log:init(),
+ Ns = val({current, db_nodes}), %mnesia_lib:running_nodes(),
+ F = fun(U) -> mnesia_recover:log_mnesia_up(U) end,
+ lists:foreach(F, Ns),
+
+ mnesia_dumper:raw_named_dump_table(Tab, dmp),
+ mnesia_checkpoint:tm_change_table_copy_type(Tab, FromS, ToS);
+ {error, Reason} ->
+ mnesia:abort(Reason)
+ end;
+ {disc_copies, ram_copies} ->
+ Ltabs = val({schema, local_tables}) -- [schema],
+ Dtabs = [L || L <- Ltabs,
+ val({L, storage_type}) /= ram_copies],
+ verify([], Dtabs, {"Disc resident tables", Dtabs, N});
+ _ ->
+ mnesia:abort({combine_error, Tab, ToS})
+ end;
+
+ FromS == ram_copies ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Dat = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dat) of
+ true ->
+ mnesia:abort({combine_error, Tab, node(),
+ "Table dump exists"});
+ false ->
+ case ToS of
+ disc_copies ->
+ mnesia_log:ets2dcd(Tab, dmp);
+ disc_only_copies ->
+ mnesia_dumper:raw_named_dump_table(Tab, dmp)
+ end,
+ mnesia_checkpoint:tm_change_table_copy_type(Tab, FromS, ToS)
+ end;
+ false ->
+ mnesia:abort({has_no_disc, node()})
+ end;
+
+ FromS == disc_copies, ToS == disc_only_copies ->
+ mnesia_dumper:raw_named_dump_table(Tab, dmp);
+ FromS == disc_only_copies ->
+ Type = Cs#cstruct.type,
+ create_ram_table(Tab, Type),
+ Datname = mnesia_lib:tab2dat(Tab),
+ Repair = mnesia_monitor:get_env(auto_repair),
+ case mnesia_lib:dets_to_ets(Tab, Tab, Datname, Type, Repair, no) of
+ loaded -> ok;
+ Reason ->
+ Err = "Failed to copy disc data to ram",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end;
+ true ->
+ ignore
+ end,
+ {true, mandatory};
+
+prepare_op(_Tid, {op, change_table_copy_type, N, _FromS, _ToS, _TabDef}, _WaitFor)
+ when N /= node() ->
+ {true, mandatory};
+
+prepare_op(_Tid, {op, delete_table, _TabDef}, _WaitFor) ->
+ {true, mandatory};
+
+prepare_op(_Tid, {op, dump_table, unknown, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ case lists:member(node(), Cs#cstruct.ram_copies) of
+ true ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:ets2dcd(Tab, dmp),
+ Size = mnesia:table_info(Tab, size),
+ {true, [{op, dump_table, Size, TabDef}], optional};
+ false ->
+ mnesia:abort({has_no_disc, node()})
+ end;
+ false ->
+ {false, optional}
+ end;
+
+prepare_op(_Tid, {op, add_snmp, Ustruct, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ {true, optional};
+ Storage ->
+ Tab = Cs#cstruct.name,
+ Stab = mnesia_snmp_hook:create_table(Ustruct, Tab, Storage),
+ mnesia_lib:set({Tab, {index, snmp}}, Stab),
+ {true, optional}
+ end;
+
+prepare_op(_Tid, {op, transform, ignore, _TabDef}, _WaitFor) ->
+ {true, mandatory}; %% Apply schema changes only.
+prepare_op(_Tid, {op, transform, Fun, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ {true, mandatory};
+ Storage ->
+ Tab = Cs#cstruct.name,
+ RecName = Cs#cstruct.record_name,
+ Type = Cs#cstruct.type,
+ NewArity = length(Cs#cstruct.attributes) + 1,
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+ Key = mnesia_lib:db_first(Tab),
+ Op = {op, transform, Fun, TabDef},
+ case catch transform_objs(Fun, Tab, RecName,
+ Key, NewArity, Storage, Type, [Op]) of
+ {'EXIT', Reason} ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ exit({"Bad transform function", Tab, Fun, node(), Reason});
+ Objs ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ {true, Objs, mandatory}
+ end
+ end;
+
+prepare_op(_Tid, _Op, _WaitFor) ->
+ {true, optional}.
+
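+%% Illustrative sketch, assuming a hypothetical 'person' table whose record
+%% gains an extra field: the transform op handled above is what a user-level
+%% call such as
+%%
+%%   Fun = fun({person, Id, Name}) -> {person, Id, Name, undefined} end,
+%%   mnesia:transform_table(person, Fun, [id, name, age]),
+%%
+%% is eventually turned into.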
+
+create_ram_table(Tab, Type) ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ case mnesia_monitor:unsafe_mktab(Tab, Args) of
+ Tab ->
+ ok;
+ {error,Reason} ->
+ Err = "Failed to create ets table",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end.
+create_disc_table(Tab) ->
+ File = mnesia_lib:tab2dcd(Tab),
+ file:delete(File),
+ FArg = [{file, File}, {name, {mnesia,create}},
+ {repair, false}, {mode, read_write}],
+ case mnesia_monitor:open_log(FArg) of
+ {ok,Log} ->
+ mnesia_monitor:unsafe_close_log(Log),
+ ok;
+ {error,Reason} ->
+ Err = "Failed to create disc table",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end.
+create_disc_only_table(Tab,Type) ->
+ File = mnesia_lib:tab2dat(Tab),
+ file:delete(File),
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ case mnesia_monitor:unsafe_open_dets(Tab, Args) of
+ {ok, _} ->
+ ok;
+ {error,Reason} ->
+ Err = "Failed to create disc table",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end.
+
+
+receive_sync([], Pids) ->
+ Pids;
+receive_sync(Nodes, Pids) ->
+ receive
+ {sync_trans, Pid} ->
+ Node = node(Pid),
+ receive_sync(lists:delete(Node, Nodes), [Pid | Pids]);
+ Else ->
+ {abort, Else}
+ end.
+
+lock_del_table(Tab, Node, Cs, Father) ->
+ Ns = val({schema, active_replicas}),
+ Lock = fun() ->
+ mnesia:write_lock_table(Tab),
+ {Res, []} = rpc:multicall(Ns, ?MODULE, set_where_to_read, [Tab, Node, Cs]),
+ Filter = fun(ok) ->
+ false;
+ ({badrpc, {'EXIT', {undef, _}}}) ->
+                          %% This will be the case when we talk to nodes
+                          %% older than 3.8.2; they set where_to_read
+                          %% without getting a lock.
+ false;
+ (_) ->
+ true
+ end,
+ [] = lists:filter(Filter, Res),
+ ok
+ end,
+ case mnesia:transaction(Lock) of
+ {'atomic', ok} ->
+ Father ! {self(), updated};
+ {aborted, R} ->
+ Father ! {self(), R}
+ end,
+ unlink(Father),
+ exit(normal).
+
+set_where_to_read(Tab, Node, Cs) ->
+ case mnesia_lib:val({Tab, where_to_read}) of
+ Node ->
+ case Cs#cstruct.local_content of
+ true ->
+ ok;
+ false ->
+ mnesia_lib:set_remote_where_to_read(Tab, [Node]),
+ ok
+ end;
+ _ ->
+ ok
+ end.
+
+%% Build up the list in reverse order.
+transform_objs(_Fun, _Tab, _RT, '$end_of_table', _NewArity, _Storage, _Type, Acc) ->
+ Acc;
+transform_objs(Fun, Tab, RecName, Key, A, Storage, Type, Acc) ->
+ Objs = mnesia_lib:db_get(Tab, Key),
+ NextKey = mnesia_lib:db_next_key(Tab, Key),
+ Oid = {Tab, Key},
+ NewObjs = {Ws, Ds} = transform_obj(Tab, RecName, Key, Fun, Objs, A, Type, [], []),
+ if
+ NewObjs == {[], []} ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type, Acc);
+ Type == bag ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ws, write}},
+ {op, rec, Storage, {Oid, [Oid], delete}} | Acc]);
+ Ds == [] ->
+ %% Type is set or ordered_set, no need to delete the record first
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ws, write}} | Acc]);
+ Ws == [] ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ds, write}} | Acc]);
+ true ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ws, write}},
+ {op, rec, Storage, {Oid, Ds, delete}} | Acc])
+ end.
+
+transform_obj(Tab, RecName, Key, Fun, [Obj|Rest], NewArity, Type, Ws, Ds) ->
+ NewObj = Fun(Obj),
+ if
+ size(NewObj) /= NewArity ->
+ exit({"Bad arity", Obj, NewObj});
+ NewObj == Obj ->
+ transform_obj(Tab, RecName, Key, Fun, Rest, NewArity, Type, Ws, Ds);
+ RecName == element(1, NewObj), Key == element(2, NewObj) ->
+ transform_obj(Tab, RecName, Key, Fun, Rest, NewArity,
+ Type, [NewObj | Ws], Ds);
+ NewObj == delete ->
+ case Type of
+ bag -> %% Just don't write that object
+ transform_obj(Tab, RecName, Key, Fun, Rest,
+ NewArity, Type, Ws, Ds);
+ _ ->
+ transform_obj(Tab, RecName, Key, Fun, Rest, NewArity,
+ Type, Ws, [NewObj | Ds])
+ end;
+ true ->
+ exit({"Bad key or Record Name", Obj, NewObj})
+ end;
+transform_obj(_Tab, _RecName, _Key, _Fun, [], _NewArity, _Type, Ws, Ds) ->
+ {lists:reverse(Ws), lists:reverse(Ds)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Undo prepare of commit
+undo_prepare_commit(Tid, Commit) ->
+ case Commit#commit.schema_ops of
+ [] ->
+ ignore;
+ Ops ->
+            %% Catch to allow failure; mnesia_controller may not be started
+ catch mnesia_controller:release_schema_commit_lock(),
+ undo_prepare_ops(Tid, Ops)
+ end,
+ Commit.
+
+%% Undo in reverse order
+undo_prepare_ops(Tid, [Op | Ops]) ->
+ case element(1, Op) of
+ TheOp when TheOp /= op, TheOp /= restore_op ->
+ undo_prepare_ops(Tid, Ops);
+ _ ->
+ undo_prepare_ops(Tid, Ops),
+ undo_prepare_op(Tid, Op)
+ end;
+undo_prepare_ops(_Tid, []) ->
+ [].
+
+undo_prepare_op(_Tid, {op, announce_im_running, _, _, Running, RemoteRunning}) ->
+ case lists:member(node(), Running) of
+ true ->
+ unannounce_im_running(RemoteRunning -- Running);
+ false ->
+ unannounce_im_running(Running -- RemoteRunning)
+ end;
+
+undo_prepare_op(_Tid, {op, sync_trans}) ->
+ ok;
+
+undo_prepare_op(Tid, {op, create_table, TabDef}) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_lib:unset({Tab, create_table}),
+ delete_cstruct(Tid, Cs),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ ok;
+ ram_copies ->
+ ram_delete_table(Tab, ram_copies);
+ disc_copies ->
+ ram_delete_table(Tab, disc_copies),
+ DcdFile = mnesia_lib:tab2dcd(Tab),
+ %% disc_delete_table(Tab, Storage),
+ file:delete(DcdFile);
+ disc_only_copies ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ Dat = mnesia_lib:tab2dat(Tab),
+ %% disc_delete_table(Tab, Storage),
+ file:delete(Dat)
+ end;
+
+undo_prepare_op(Tid, {op, add_table_copy, Storage, Node, TabDef}) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ if
+ Tab == schema ->
+ true; % Nothing to prepare
+ Node == node() ->
+ mnesia_checkpoint:tm_del_copy(Tab, Node),
+ mnesia_controller:unannounce_add_table_copy(Tab, Node),
+ if
+ Storage == disc_only_copies; Tab == schema ->
+ mnesia_monitor:close_dets(Tab),
+ file:delete(mnesia_lib:tab2dat(Tab));
+ true ->
+ file:delete(mnesia_lib:tab2dcd(Tab))
+ end,
+ ram_delete_table(Tab, Storage),
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ insert_cstruct(Tid, Cs2, true); % Don't care about the version
+ Node /= node() ->
+ mnesia_controller:unannounce_add_table_copy(Tab, Node),
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ insert_cstruct(Tid, Cs2, true) % Don't care about the version
+ end;
+
+undo_prepare_op(_Tid, {op, del_table_copy, _, Node, TabDef})
+ when Node == node() ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_lib:set({Tab, where_to_read}, Node);
+
+
+undo_prepare_op(_Tid, {op, change_table_copy_type, N, FromS, ToS, TabDef})
+ when N == node() ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_checkpoint:tm_change_table_copy_type(Tab, ToS, FromS),
+ Dmp = mnesia_lib:tab2dmp(Tab),
+
+ case {FromS, ToS} of
+ {ram_copies, disc_copies} when Tab == schema ->
+ file:delete(Dmp),
+ mnesia_log:purge_some_logs(),
+ set(use_dir, false);
+ {ram_copies, disc_copies} ->
+ file:delete(Dmp);
+ {ram_copies, disc_only_copies} ->
+ file:delete(Dmp);
+ {disc_only_copies, _} ->
+ ram_delete_table(Tab, ram_copies);
+ _ ->
+ ignore
+ end;
+
+undo_prepare_op(_Tid, {op, dump_table, _Size, TabDef}) ->
+ Cs = list2cs(TabDef),
+ case lists:member(node(), Cs#cstruct.ram_copies) of
+ true ->
+ Tab = Cs#cstruct.name,
+ Dmp = mnesia_lib:tab2dmp(Tab),
+ file:delete(Dmp);
+ false ->
+ ignore
+ end;
+
+undo_prepare_op(_Tid, {op, add_snmp, _Ustruct, TabDef}) ->
+ Cs = list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ true;
+ _Storage ->
+ Tab = Cs#cstruct.name,
+ case ?catch_val({Tab, {index, snmp}}) of
+ {'EXIT',_} ->
+ ignore;
+ Stab ->
+ mnesia_snmp_hook:delete_table(Tab, Stab),
+ mnesia_lib:unset({Tab, {index, snmp}})
+ end
+ end;
+
+undo_prepare_op(_Tid, _Op) ->
+ ignore.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ram_delete_table(Tab, Storage) ->
+ case Storage of
+ unknown ->
+ ignore;
+ disc_only_copies ->
+ ignore;
+ _Else ->
+ %% delete possible index files and data .....
+ %% Got to catch this since if no info has been set in the
+ %% mnesia_gvar it will crash
+ catch mnesia_index:del_transient(Tab, Storage),
+ case ?catch_val({Tab, {index, snmp}}) of
+ {'EXIT', _} ->
+ ignore;
+ Etab ->
+ catch mnesia_snmp_hook:delete_table(Tab, Etab)
+ end,
+ catch ?ets_delete_table(Tab)
+ end.
+
+purge_dir(Dir, KeepFiles) ->
+ Suffixes = known_suffixes(),
+ purge_dir(Dir, KeepFiles, Suffixes).
+
+purge_dir(Dir, KeepFiles, Suffixes) ->
+ case dir_exists(Dir) of
+ true ->
+ {ok, AllFiles} = file:list_dir(Dir),
+ purge_known_files(AllFiles, KeepFiles, Dir, Suffixes);
+ false ->
+ ok
+ end.
+
+purge_tmp_files() ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Dir = mnesia_lib:dir(),
+ KeepFiles = [],
+ Exists = mnesia_lib:exists(mnesia_lib:tab2dat(schema)),
+ case Exists of
+ true ->
+ Suffixes = tmp_suffixes(),
+ purge_dir(Dir, KeepFiles, Suffixes);
+ false ->
+ %% Interrupted change of storage type
+ %% for schema table
+ Suffixes = known_suffixes(),
+ purge_dir(Dir, KeepFiles, Suffixes),
+ mnesia_lib:set(use_dir, false)
+ end;
+
+ false ->
+ ok
+ end.
+
+purge_known_files([File | Tail], KeepFiles, Dir, Suffixes) ->
+ case lists:member(File, KeepFiles) of
+ true ->
+ ignore;
+ false ->
+ case has_known_suffix(File, Suffixes, false) of
+ false ->
+ ignore;
+ true ->
+ AbsFile = filename:join([Dir, File]),
+ file:delete(AbsFile)
+ end
+ end,
+ purge_known_files(Tail, KeepFiles, Dir, Suffixes);
+purge_known_files([], _KeepFiles, _Dir, _Suffixes) ->
+ ok.
+
+has_known_suffix(_File, _Suffixes, true) ->
+ true;
+has_known_suffix(File, [Suffix | Tail], false) ->
+ has_known_suffix(File, Tail, lists:suffix(Suffix, File));
+has_known_suffix(_File, [], Bool) ->
+ Bool.
+
+known_suffixes() -> real_suffixes() ++ tmp_suffixes().
+
+real_suffixes() -> [".DAT", ".LOG", ".BUP", ".DCL", ".DCD"].
+
+tmp_suffixes() -> [".TMP", ".BUPTMP", ".RET", ".DMP"].
+
+info() ->
+ Tabs = lists:sort(val({schema, tables})),
+ lists:foreach(fun(T) -> info(T) end, Tabs),
+ ok.
+
+info(Tab) ->
+ Props = get_table_properties(Tab),
+ io:format("-- Properties for ~w table --- ~n",[Tab]),
+ info2(Tab, Props).
+info2(Tab, [{cstruct, _V} | Tail]) -> % Ignore cstruct
+ info2(Tab, Tail);
+info2(Tab, [{frag_hash, _V} | Tail]) -> % Ignore frag_hash
+ info2(Tab, Tail);
+info2(Tab, [{P, V} | Tail]) ->
+ io:format("~-20w -> ~p~n",[P,V]),
+ info2(Tab, Tail);
+info2(_, []) ->
+ io:format("~n", []).
+
+get_table_properties(Tab) ->
+ case catch mnesia_lib:db_match_object(ram_copies,
+ mnesia_gvar, {{Tab, '_'}, '_'}) of
+ {'EXIT', _} ->
+ mnesia:abort({no_exists, Tab, all});
+ RawGvar ->
+ case [{Item, Val} || {{_Tab, Item}, Val} <- RawGvar] of
+ [] ->
+ [];
+ Gvar ->
+ Size = {size, mnesia:table_info(Tab, size)},
+ Memory = {memory, mnesia:table_info(Tab, memory)},
+ Master = {master_nodes, mnesia:table_info(Tab, master_nodes)},
+ lists:sort([Size, Memory, Master | Gvar])
+ end
+ end.
+
+%%%%%%%%%%% RESTORE %%%%%%%%%%%
+
+-record(r, {iter = schema,
+ module,
+ table_options = [],
+ default_op = clear_tables,
+ tables = [],
+ opaque,
+ insert_op = error_fun,
+ recs = error_recs
+ }).
+
+restore(Opaque) ->
+ restore(Opaque, [], mnesia_monitor:get_env(backup_module)).
+restore(Opaque, Args) when list(Args) ->
+ restore(Opaque, Args, mnesia_monitor:get_env(backup_module));
+restore(_Opaque, BadArg) ->
+ {aborted, {badarg, BadArg}}.
+restore(Opaque, Args, Module) when list(Args), atom(Module) ->
+ InitR = #r{opaque = Opaque, module = Module},
+ case catch lists:foldl(fun check_restore_arg/2, InitR, Args) of
+ R when record(R, r) ->
+ case mnesia_bup:read_schema(Module, Opaque) of
+ {error, Reason} ->
+ {aborted, Reason};
+ BupSchema ->
+ schema_transaction(fun() -> do_restore(R, BupSchema) end)
+ end;
+ {'EXIT', Reason} ->
+ {aborted, Reason}
+ end;
+restore(_Opaque, Args, Module) ->
+ {aborted, {badarg, Args, Module}}.
+
+check_restore_arg({module, Mod}, R) when atom(Mod) ->
+ R#r{module = Mod};
+
+check_restore_arg({clear_tables, List}, R) when list(List) ->
+ case lists:member(schema, List) of
+ false ->
+ TableList = [{Tab, clear_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+ true ->
+ exit({badarg, {clear_tables, schema}})
+ end;
+check_restore_arg({recreate_tables, List}, R) when list(List) ->
+ case lists:member(schema, List) of
+ false ->
+ TableList = [{Tab, recreate_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+ true ->
+ exit({badarg, {recreate_tables, schema}})
+ end;
+check_restore_arg({keep_tables, List}, R) when list(List) ->
+ TableList = [{Tab, keep_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+check_restore_arg({skip_tables, List}, R) when list(List) ->
+ TableList = [{Tab, skip_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+check_restore_arg({default_op, Op}, R) ->
+ case Op of
+ clear_tables -> ok;
+ recreate_tables -> ok;
+ keep_tables -> ok;
+ skip_tables -> ok;
+ Else ->
+ exit({badarg, {bad_default_op, Else}})
+ end,
+ R#r{default_op = Op};
+
+check_restore_arg(BadArg,_) ->
+ exit({badarg, BadArg}).
+
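+%% Illustrative sketch, assuming the default backup module and hypothetical
+%% table names: the options validated above come from a user-level call like
+%%
+%%   mnesia:restore("/tmp/mnesia.BUP",
+%%                  [{clear_tables, [person]},
+%%                   {recreate_tables, [account]},
+%%                   {default_op, skip_tables}]).
+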
+do_restore(R, BupSchema) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ R2 = restore_schema(BupSchema, R),
+ insert_schema_ops(TidTs, [{restore_op, R2}]),
+ [element(1, TabStruct) || TabStruct <- R2#r.tables].
+
+arrange_restore(R, Fun, Recs) ->
+ R2 = R#r{insert_op = Fun, recs = Recs},
+ case mnesia_bup:iterate(R#r.module, fun restore_items/4, R#r.opaque, R2) of
+ {ok, R3} -> R3#r.recs;
+ {error, Reason} -> mnesia:abort(Reason);
+ Reason -> mnesia:abort(Reason)
+ end.
+
+restore_items([Rec | Recs], Header, Schema, R) ->
+ Tab = element(1, Rec),
+ case lists:keysearch(Tab, 1, R#r.tables) of
+ {value, {Tab, Where, Snmp, RecName}} ->
+ {Rest, NRecs} =
+ restore_tab_items([Rec | Recs], Tab, RecName, Where, Snmp,
+ R#r.recs, R#r.insert_op),
+ restore_items(Rest, Header, Schema, R#r{recs = NRecs});
+ false ->
+ Rest = skip_tab_items(Recs, Tab),
+ restore_items(Rest, Header, Schema, R)
+ end;
+
+restore_items([], _Header, _Schema, R) ->
+ R.
+
+restore_func(Tab, R) ->
+ case lists:keysearch(Tab, 1, R#r.table_options) of
+ {value, {Tab, OP}} ->
+ OP;
+ false ->
+ R#r.default_op
+ end.
+
+where_to_commit(Tab, CsList) ->
+ Ram = [{N, ram_copies} || N <- pick(Tab, ram_copies, CsList, [])],
+ Disc = [{N, disc_copies} || N <- pick(Tab, disc_copies, CsList, [])],
+ DiscO = [{N, disc_only_copies} || N <- pick(Tab, disc_only_copies, CsList, [])],
+ Ram ++ Disc ++ DiscO.
+
+%% Changes to the meta info of the schema itself are not allowed
+restore_schema([{schema, schema, _List} | Schema], R) ->
+ restore_schema(Schema, R);
+restore_schema([{schema, Tab, List} | Schema], R) ->
+ case restore_func(Tab, R) of
+ clear_tables ->
+ do_clear_table(Tab),
+ Where = val({Tab, where_to_commit}),
+ Snmp = val({Tab, snmp}),
+ RecName = val({Tab, record_name}),
+ R2 = R#r{tables = [{Tab, Where, Snmp, RecName} | R#r.tables]},
+ restore_schema(Schema, R2);
+ recreate_tables ->
+ TidTs = get_tid_ts_and_lock(Tab, write),
+ NC = {cookie, ?unique_cookie},
+ List2 = lists:keyreplace(cookie, 1, List, NC),
+ Where = where_to_commit(Tab, List2),
+ Snmp = pick(Tab, snmp, List2, []),
+ RecName = pick(Tab, record_name, List2, Tab),
+% case ?catch_val({Tab, cstruct}) of
+% {'EXIT', _} ->
+% ignore;
+% OldCs when record(OldCs, cstruct) ->
+% do_delete_table(Tab)
+% end,
+% unsafe_do_create_table(list2cs(List2)),
+ insert_schema_ops(TidTs, [{op, restore_recreate, List2}]),
+ R2 = R#r{tables = [{Tab, Where, Snmp, RecName} | R#r.tables]},
+ restore_schema(Schema, R2);
+ keep_tables ->
+ get_tid_ts_and_lock(Tab, write),
+ Where = val({Tab, where_to_commit}),
+ Snmp = val({Tab, snmp}),
+ RecName = val({Tab, record_name}),
+ R2 = R#r{tables = [{Tab, Where, Snmp, RecName} | R#r.tables]},
+ restore_schema(Schema, R2);
+ skip_tables ->
+ restore_schema(Schema, R)
+ end;
+
+restore_schema([{schema, Tab} | Schema], R) ->
+ do_delete_table(Tab),
+ Tabs = lists:delete(Tab,R#r.tables),
+ restore_schema(Schema, R#r{tables = Tabs});
+restore_schema([], R) ->
+ R.
+
+restore_tab_items([Rec | Rest], Tab, RecName, Where, Snmp, Recs, Op)
+ when element(1, Rec) == Tab ->
+ NewRecs = Op(Rec, Recs, RecName, Where, Snmp),
+ restore_tab_items(Rest, Tab, RecName, Where, Snmp, NewRecs, Op);
+
+restore_tab_items(Rest, _Tab, _RecName, _Where, _Snmp, Recs, _Op) ->
+ {Rest, Recs}.
+
+skip_tab_items([Rec| Rest], Tab)
+ when element(1, Rec) == Tab ->
+ skip_tab_items(Rest, Tab);
+skip_tab_items(Recs, _) ->
+ Recs.
+
+%%%%%%%%% Dump tables %%%%%%%%%%%%%
+dump_tables(Tabs) when list(Tabs) ->
+ schema_transaction(fun() -> do_dump_tables(Tabs) end);
+dump_tables(Tabs) ->
+ {aborted, {bad_type, Tabs}}.
+
+do_dump_tables(Tabs) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ insert_schema_ops(TidTs, make_dump_tables(Tabs)).
+
+make_dump_tables([schema | _Tabs]) ->
+ mnesia:abort({bad_type, schema});
+make_dump_tables([Tab | Tabs]) ->
+ get_tid_ts_and_lock(Tab, read),
+ TabDef = get_create_list(Tab),
+ DiscResident = val({Tab, disc_copies}) ++ val({Tab, disc_only_copies}),
+ verify([], DiscResident,
+ {"Only allowed on ram_copies", Tab, DiscResident}),
+ [{op, dump_table, unknown, TabDef} | make_dump_tables(Tabs)];
+make_dump_tables([]) ->
+ [].
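+
+%% Illustrative sketch, assuming a hypothetical ram_copies table 'person':
+%% the entry point above is typically reached via mnesia:dump_tables([person]).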
+
+%% Merge the local schema with the schema on other nodes
+merge_schema() ->
+ schema_transaction(fun() -> do_merge_schema() end).
+
+do_merge_schema() ->
+ {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, write),
+ Connected = val(recover_nodes),
+ Running = val({current, db_nodes}),
+ Store = Ts#tidstore.store,
+ case Connected -- Running of
+ [Node | _] ->
+ %% Time for a schema merging party!
+ mnesia_locker:wlock_no_exist(Tid, Store, schema, [Node]),
+
+ case rpc:call(Node, mnesia_controller, get_cstructs, []) of
+ {cstructs, Cstructs, RemoteRunning1} ->
+ LockedAlready = Running ++ [Node],
+ {New, Old} = mnesia_recover:connect_nodes(RemoteRunning1),
+ RemoteRunning = mnesia_lib:intersect(New ++ Old, RemoteRunning1),
+ if
+ RemoteRunning /= RemoteRunning1 ->
+ mnesia_lib:error("Mnesia on ~p could not connect to node(s) ~p~n",
+ [node(), RemoteRunning1 -- RemoteRunning]);
+ true -> ok
+ end,
+ NeedsLock = RemoteRunning -- LockedAlready,
+ mnesia_locker:wlock_no_exist(Tid, Store, schema, NeedsLock),
+
+ {value, SchemaCs} =
+ lists:keysearch(schema, #cstruct.name, Cstructs),
+
+ %% Announce that Node is running
+ A = [{op, announce_im_running, node(),
+ cs2list(SchemaCs), Running, RemoteRunning}],
+ do_insert_schema_ops(Store, A),
+
+ %% Introduce remote tables to local node
+ do_insert_schema_ops(Store, make_merge_schema(Node, Cstructs)),
+
+ %% Introduce local tables to remote nodes
+ Tabs = val({schema, tables}),
+ Ops = [{op, merge_schema, get_create_list(T)}
+ || T <- Tabs,
+ not lists:keymember(T, #cstruct.name, Cstructs)],
+ do_insert_schema_ops(Store, Ops),
+
+ %% Ensure that the txn will be committed on all nodes
+ announce_im_running(RemoteRunning, SchemaCs),
+ {merged, Running, RemoteRunning};
+ {error, Reason} ->
+ {"Cannot get cstructs", Node, Reason};
+ {badrpc, Reason} ->
+ {"Cannot get cstructs", Node, {badrpc, Reason}}
+ end;
+ [] ->
+ %% No more nodes to merge schema with
+ not_merged
+ end.
+
+make_merge_schema(Node, [Cs | Cstructs]) ->
+ Ops = do_make_merge_schema(Node, Cs),
+ Ops ++ make_merge_schema(Node, Cstructs);
+make_merge_schema(_Node, []) ->
+ [].
+
+%% Merge definitions of schema table
+do_make_merge_schema(Node, RemoteCs)
+ when RemoteCs#cstruct.name == schema ->
+ Cs = val({schema, cstruct}),
+ Masters = mnesia_recover:get_master_nodes(schema),
+ HasRemoteMaster = lists:member(Node, Masters),
+ HasLocalMaster = lists:member(node(), Masters),
+ Force = HasLocalMaster or HasRemoteMaster,
+    %% What are the storage type opinions?
+ StCsLocal = mnesia_lib:cs_to_storage_type(node(), Cs),
+ StRcsLocal = mnesia_lib:cs_to_storage_type(node(), RemoteCs),
+ StCsRemote = mnesia_lib:cs_to_storage_type(Node, Cs),
+ StRcsRemote = mnesia_lib:cs_to_storage_type(Node, RemoteCs),
+
+ if
+ Cs#cstruct.cookie == RemoteCs#cstruct.cookie,
+ Cs#cstruct.version == RemoteCs#cstruct.version ->
+ %% Great, we have the same cookie and version
+ %% and do not need to merge cstructs
+ [];
+
+ Cs#cstruct.cookie /= RemoteCs#cstruct.cookie,
+ Cs#cstruct.disc_copies /= [],
+ RemoteCs#cstruct.disc_copies /= [] ->
+ %% Both cstructs involves disc nodes
+ %% and we cannot merge them
+ if
+ HasLocalMaster == true,
+ HasRemoteMaster == false ->
+ %% Choose local cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(Cs)}];
+
+ HasRemoteMaster == true,
+ HasLocalMaster == false ->
+ %% Choose remote cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(RemoteCs)}];
+
+ true ->
+ Str = io_lib:format("Incompatible schema cookies. "
+ "Please, restart from old backup."
+ "~w = ~w, ~w = ~w~n",
+ [Node, cs2list(RemoteCs), node(), cs2list(Cs)]),
+ throw(Str)
+ end;
+
+ StCsLocal /= StRcsLocal, StRcsLocal /= unknown ->
+ Str = io_lib:format("Incompatible schema storage types. "
+ "on ~w storage ~w, on ~w storage ~w~n",
+ [node(), StCsLocal, Node, StRcsLocal]),
+ throw(Str);
+ StCsRemote /= StRcsRemote, StCsRemote /= unknown ->
+ Str = io_lib:format("Incompatible schema storage types. "
+ "on ~w storage ~w, on ~w storage ~w~n",
+ [node(), StCsRemote, Node, StRcsRemote]),
+ throw(Str);
+
+ Cs#cstruct.disc_copies /= [] ->
+ %% Choose local cstruct,
+ %% since it involves disc nodes
+ MergedCs = merge_cstructs(Cs, RemoteCs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ RemoteCs#cstruct.disc_copies /= [] ->
+ %% Choose remote cstruct,
+ %% since it involves disc nodes
+ MergedCs = merge_cstructs(RemoteCs, Cs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ Cs > RemoteCs ->
+ %% Choose remote cstruct
+ MergedCs = merge_cstructs(RemoteCs, Cs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ true ->
+ %% Choose local cstruct
+ MergedCs = merge_cstructs(Cs, RemoteCs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}]
+ end;
+
+%% Merge definitions of normal table
+do_make_merge_schema(Node, RemoteCs) ->
+ Tab = RemoteCs#cstruct.name,
+ Masters = mnesia_recover:get_master_nodes(schema),
+ HasRemoteMaster = lists:member(Node, Masters),
+ HasLocalMaster = lists:member(node(), Masters),
+ Force = HasLocalMaster or HasRemoteMaster,
+ case ?catch_val({Tab, cstruct}) of
+ {'EXIT', _} ->
+ %% A completely new table, created while Node was down
+ [{op, merge_schema, cs2list(RemoteCs)}];
+ Cs when Cs#cstruct.cookie == RemoteCs#cstruct.cookie ->
+ if
+ Cs#cstruct.version == RemoteCs#cstruct.version ->
+ %% We have exactly the same version of the
+ %% table def
+ [];
+
+ Cs#cstruct.version > RemoteCs#cstruct.version ->
+ %% Oops, we have different versions
+ %% of the table def, lets merge them.
+                    %% The only change that may have occurred
+                    %% is that new replicas may have been added.
+ MergedCs = merge_cstructs(Cs, RemoteCs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ Cs#cstruct.version < RemoteCs#cstruct.version ->
+ %% Oops, we have different versions
+ %% of the table def, lets merge them
+ MergedCs = merge_cstructs(RemoteCs, Cs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}]
+ end;
+ Cs ->
+ %% Different cookies, not possible to merge
+ if
+ HasLocalMaster == true,
+ HasRemoteMaster == false ->
+ %% Choose local cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(Cs)}];
+
+ HasRemoteMaster == true,
+ HasLocalMaster == false ->
+ %% Choose remote cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(RemoteCs)}];
+
+ true ->
+ Str = io_lib:format("Bad cookie in table definition"
+ " ~w: ~w = ~w, ~w = ~w~n",
+ [Tab, node(), Cs, Node, RemoteCs]),
+ throw(Str)
+ end
+ end.
+
+%% Change of table definitions (cstructs) requires all replicas
+%% of the table to be active. New replicas, db_nodes and tables
+%% may however be added even if some replica is inactive. These
+%% invariants must be enforced in order to allow merge of cstructs.
+%%
+%% Returns a new cstruct or issues a fatal error
+merge_cstructs(Cs, RemoteCs, Force) ->
+ verify_cstruct(Cs),
+ case catch do_merge_cstructs(Cs, RemoteCs, Force) of
+ {'EXIT', {aborted, _Reason}} when Force == true ->
+ Cs;
+ {'EXIT', Reason} ->
+ exit(Reason);
+ MergedCs when record(MergedCs, cstruct) ->
+ MergedCs;
+ Other ->
+ throw(Other)
+ end.
+
+do_merge_cstructs(Cs, RemoteCs, Force) ->
+ verify_cstruct(RemoteCs),
+ Ns = mnesia_lib:uniq(mnesia_lib:cs_to_nodes(Cs) ++
+ mnesia_lib:cs_to_nodes(RemoteCs)),
+ {AnythingNew, MergedCs} =
+ merge_storage_type(Ns, false, Cs, RemoteCs, Force),
+ MergedCs2 = merge_versions(AnythingNew, MergedCs, RemoteCs, Force),
+ verify_cstruct(MergedCs2),
+ MergedCs2.
+
+merge_storage_type([N | Ns], AnythingNew, Cs, RemoteCs, Force) ->
+ Local = mnesia_lib:cs_to_storage_type(N, Cs),
+ Remote = mnesia_lib:cs_to_storage_type(N, RemoteCs),
+ case compare_storage_type(true, Local, Remote) of
+ {same, _Storage} ->
+ merge_storage_type(Ns, AnythingNew, Cs, RemoteCs, Force);
+ {diff, Storage} ->
+ Cs2 = change_storage_type(N, Storage, Cs),
+ merge_storage_type(Ns, true, Cs2, RemoteCs, Force);
+ incompatible when Force == true ->
+ merge_storage_type(Ns, AnythingNew, Cs, RemoteCs, Force);
+ Other ->
+ Str = io_lib:format("Cannot merge storage type for node ~w "
+ "in cstruct ~w with remote cstruct ~w (~w)~n",
+ [N, Cs, RemoteCs, Other]),
+ throw(Str)
+ end;
+merge_storage_type([], AnythingNew, MergedCs, _RemoteCs, _Force) ->
+ {AnythingNew, MergedCs}.
+
+compare_storage_type(_Retry, Any, Any) ->
+ {same, Any};
+compare_storage_type(_Retry, unknown, Any) ->
+ {diff, Any};
+compare_storage_type(_Retry, ram_copies, disc_copies) ->
+ {diff, disc_copies};
+compare_storage_type(_Retry, disc_copies, disc_only_copies) ->
+ {diff, disc_only_copies};
+compare_storage_type(true, One, Another) ->
+ compare_storage_type(false, Another, One);
+compare_storage_type(false, _One, _Another) ->
+ incompatible.
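+
+%% For illustration: compare_storage_type(true, disc_copies, ram_copies)
+%% retries with the arguments swapped and yields {diff, disc_copies}, while
+%% compare_storage_type(true, ram_copies, disc_only_copies) ends up as
+%% incompatible after the retry.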
+
+change_storage_type(N, ram_copies, Cs) ->
+ Nodes = [N | Cs#cstruct.ram_copies],
+ Cs#cstruct{ram_copies = mnesia_lib:uniq(Nodes)};
+change_storage_type(N, disc_copies, Cs) ->
+ Nodes = [N | Cs#cstruct.disc_copies],
+ Cs#cstruct{disc_copies = mnesia_lib:uniq(Nodes)};
+change_storage_type(N, disc_only_copies, Cs) ->
+ Nodes = [N | Cs#cstruct.disc_only_copies],
+ Cs#cstruct{disc_only_copies = mnesia_lib:uniq(Nodes)}.
+
+%% BUGBUG: Verify match of frag info; equality demanded for all but add_node
+
+merge_versions(AnythingNew, Cs, RemoteCs, Force) ->
+ if
+ Cs#cstruct.name == schema ->
+ ok;
+ Cs#cstruct.name /= schema,
+ Cs#cstruct.cookie == RemoteCs#cstruct.cookie ->
+ ok;
+ Force == true ->
+ ok;
+ true ->
+ Str = io_lib:format("Bad cookies. Cannot merge definitions of "
+ "table ~w. Local = ~w, Remote = ~w~n",
+ [Cs#cstruct.name, Cs, RemoteCs]),
+ throw(Str)
+ end,
+ if
+ Cs#cstruct.name == RemoteCs#cstruct.name,
+ Cs#cstruct.type == RemoteCs#cstruct.type,
+ Cs#cstruct.local_content == RemoteCs#cstruct.local_content,
+ Cs#cstruct.attributes == RemoteCs#cstruct.attributes,
+ Cs#cstruct.index == RemoteCs#cstruct.index,
+ Cs#cstruct.snmp == RemoteCs#cstruct.snmp,
+ Cs#cstruct.access_mode == RemoteCs#cstruct.access_mode,
+ Cs#cstruct.load_order == RemoteCs#cstruct.load_order,
+ Cs#cstruct.user_properties == RemoteCs#cstruct.user_properties ->
+ do_merge_versions(AnythingNew, Cs, RemoteCs);
+ Force == true ->
+ do_merge_versions(AnythingNew, Cs, RemoteCs);
+ true ->
+ Str1 = io_lib:format("Cannot merge definitions of "
+ "table ~w. Local = ~w, Remote = ~w~n",
+ [Cs#cstruct.name, Cs, RemoteCs]),
+ throw(Str1)
+ end.
+
+do_merge_versions(AnythingNew, MergedCs, RemoteCs) ->
+ {{Major1, Minor1}, _Detail1} = MergedCs#cstruct.version,
+ {{Major2, Minor2}, _Detail2} = RemoteCs#cstruct.version,
+ if
+ MergedCs#cstruct.version == RemoteCs#cstruct.version ->
+ MergedCs;
+ AnythingNew == false ->
+ MergedCs;
+ Major1 == Major2 ->
+ Minor = lists:max([Minor1, Minor2]),
+ V = {{Major1, Minor}, dummy},
+ incr_version(MergedCs#cstruct{version = V});
+ Major1 /= Major2 ->
+ Major = lists:max([Major1, Major2]),
+ V = {{Major, 0}, dummy},
+ incr_version(MergedCs#cstruct{version = V})
+ end.
+
+announce_im_running([N | Ns], SchemaCs) ->
+ {L1, L2} = mnesia_recover:connect_nodes([N]),
+ case lists:member(N, L1) or lists:member(N, L2) of
+ true ->
+%% dbg_out("Adding ~p to {current db_nodes} ~n", [N]), %% qqqq
+ mnesia_lib:add({current, db_nodes}, N),
+ mnesia_controller:add_active_replica(schema, N, SchemaCs);
+ false ->
+ ignore
+ end,
+ announce_im_running(Ns, SchemaCs);
+announce_im_running([], _) ->
+ [].
+
+unannounce_im_running([N | Ns]) ->
+ mnesia_lib:del({current, db_nodes}, N),
+ mnesia_controller:del_active_replica(schema, N),
+ mnesia_recover:disconnect(N),
+ unannounce_im_running(Ns);
+unannounce_im_running([]) ->
+ [].
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_hook.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_hook.erl
new file mode 100644
index 0000000000..458323c0e4
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_hook.erl
@@ -0,0 +1,271 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_snmp_hook.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_snmp_hook).
+
+%% Hooks (called from mnesia)
+-export([check_ustruct/1, create_table/3, delete_table/2,
+ key_to_oid/3, update/1, start/2,
+ get_row/2, get_next_index/2, get_mnesia_key/2]).
+
+%% sys callback functions
+-export([system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+%% Internal exports
+-export([b_init/2]).
+
+check_ustruct([]) ->
+ true; %% default value, not SNMP'ified
+check_ustruct([{key, Types}]) ->
+ is_snmp_type(to_list(Types));
+check_ustruct(_) -> false.
+
+to_list(Tuple) when tuple(Tuple) -> tuple_to_list(Tuple);
+to_list(X) -> [X].
+
+is_snmp_type([integer | T]) -> is_snmp_type(T);
+is_snmp_type([string | T]) -> is_snmp_type(T);
+is_snmp_type([fix_string | T]) -> is_snmp_type(T);
+is_snmp_type([]) -> true;
+is_snmp_type(_) -> false.
+
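+%% Illustrative sketch, assuming a hypothetical 'person' table: the ustruct
+%% checked above is the value of the 'snmp' option in a table definition, e.g.
+%%
+%%   mnesia:create_table(person, [{attributes, [id, name]},
+%%                                {snmp, [{key, integer}]}]).
+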
+create_table([], MnesiaTab, _Storage) ->
+ mnesia:abort({badarg, MnesiaTab, {snmp, empty_snmpstruct}});
+
+create_table([{key, Us}], MnesiaTab, Storage) ->
+ Tree = b_new(MnesiaTab, Us),
+ mnesia_lib:db_fixtable(Storage, MnesiaTab, true),
+ First = mnesia_lib:db_first(Storage, MnesiaTab),
+ build_table(First, MnesiaTab, Tree, Us, Storage),
+ mnesia_lib:db_fixtable(Storage, MnesiaTab, false),
+ Tree.
+
+build_table(MnesiaKey, MnesiaTab, Tree, Us, Storage)
+ when MnesiaKey /= '$end_of_table' ->
+%% SnmpKey = key_to_oid(MnesiaTab, MnesiaKey, Us),
+%% update(write, Tree, MnesiaKey, SnmpKey),
+ update(write, Tree, MnesiaKey, MnesiaKey),
+ Next = mnesia_lib:db_next_key(Storage, MnesiaTab, MnesiaKey),
+ build_table(Next, MnesiaTab, Tree, Us, Storage);
+build_table('$end_of_table', _MnesiaTab, _Tree, _Us, _Storage) ->
+ ok.
+
+delete_table(_MnesiaTab, Tree) ->
+ exit(Tree, shutdown),
+ ok.
+
+%%-----------------------------------------------------------------
+%% update({Op, MnesiaTab, MnesiaKey, SnmpKey})
+%%-----------------------------------------------------------------
+
+update({clear_table, MnesiaTab}) ->
+ Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}),
+ b_clear(Tree);
+
+update({Op, MnesiaTab, MnesiaKey, SnmpKey}) ->
+ Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}),
+ update(Op, Tree, MnesiaKey, SnmpKey).
+
+update(Op, Tree, MnesiaKey, _) ->
+ case Op of
+ write ->
+ b_insert(Tree, MnesiaKey, MnesiaKey);
+ update_counter ->
+ ignore;
+ delete ->
+ b_delete(Tree, MnesiaKey);
+ delete_object ->
+ b_delete(Tree, MnesiaKey)
+ end,
+ ok.
+
+%%-----------------------------------------------------------------
+%% Func: key_to_oid(Tab, Key, Ustruct)
+%% Args: Key ::= key()
+%% key() ::= int() | string() | {int() | string()}
+%% Type ::= {fix_string | term()}
+%% Make an OBJECT IDENTIFIER out of it.
+%% Variable length objects are prepended by their length.
+%% Ex. Key = {"pelle", 42} AND Type = {string, integer} =>
+%% OID [5, $p, $e, $l, $l, $e, 42]
+%% Key = {"pelle", 42} AND Type = {fix_string, integer} =>
+%% OID [$p, $e, $l, $l, $e, 42]
+%%-----------------------------------------------------------------
+key_to_oid(Tab, Key, [{key, Types}]) ->
+ MnesiaOid = {Tab, Key},
+ if
+ tuple(Key), tuple(Types) ->
+ case {size(Key), size(Types)} of
+ {Size, Size} ->
+ keys_to_oid(MnesiaOid, Size, Key, [], Types);
+ _ ->
+ exit({bad_snmp_key, MnesiaOid})
+ end;
+ true ->
+ key_to_oid_i(MnesiaOid, Key, Types)
+ end.
+
+key_to_oid_i(_MnesiaOid, Key, integer) when integer(Key) -> [Key];
+key_to_oid_i(_MnesiaOid, Key, fix_string) when list(Key) -> Key;
+key_to_oid_i(_MnesiaOid, Key, string) when list(Key) -> [length(Key) | Key];
+key_to_oid_i(MnesiaOid, Key, Type) ->
+ exit({bad_snmp_key, [MnesiaOid, Key, Type]}).
+
+keys_to_oid(_MnesiaOid, 0, _Key, Oid, _Types) -> Oid;
+keys_to_oid(MnesiaOid, N, Key, Oid, Types) ->
+ Type = element(N, Types),
+ KeyPart = element(N, Key),
+ Oid2 = key_to_oid_i(MnesiaOid, KeyPart, Type) ++ Oid,
+ keys_to_oid(MnesiaOid, N-1, Key, Oid2, Types).
+
+%%-----------------------------------------------------------------
+%% Func: get_row/2
+%% Args: Name is the name of the table (atom)
+%% RowIndex is an Oid
+%% Returns: {ok, Row} | undefined
+%% Note that the Row returned might contain columns that
+%% are not visible via SNMP. e.g. the first column may be
+%% ifIndex, and the last MFA ({ifIndex, col1, col2, MFA}).
+%% where ifIndex is used only as index (not as a real col),
+%% and MFA as extra info, used by the application.
+%%-----------------------------------------------------------------
+get_row(Name, RowIndex) ->
+ Tree = mnesia_lib:val({Name, {index, snmp}}),
+ case b_lookup(Tree, RowIndex) of
+ {ok, {_RowIndex, Key}} ->
+ [Row] = mnesia:dirty_read({Name, Key}),
+ {ok, Row};
+ _ ->
+ undefined
+ end.
+
+%%-----------------------------------------------------------------
+%% Func: get_next_index/2
+%% Args: Name is the name of the table (atom)
+%% RowIndex is an Oid
+%% Returns: {ok, NextIndex} | endOfTable
+%%-----------------------------------------------------------------
+get_next_index(Name, RowIndex) ->
+ Tree = mnesia_lib:val({Name, {index, snmp}}),
+ case b_lookup_next(Tree, RowIndex) of
+ {ok, {NextIndex, _Key}} ->
+ {ok, NextIndex};
+ _ ->
+ endOfTable
+ end.
+
+%%-----------------------------------------------------------------
+%% Func: get_mnesia_key/2
+%% Purpose: Get the mnesia key corresponding to the RowIndex.
+%% Args: Name is the name of the table (atom)
+%% RowIndex is an Oid
+%% Returns: {ok, Key} | undefined
+%%-----------------------------------------------------------------
+get_mnesia_key(Name, RowIndex) ->
+ Tree = mnesia_lib:val({Name, {index, snmp}}),
+ case b_lookup(Tree, RowIndex) of
+ {ok, {_RowIndex, Key}} ->
+ {ok, Key};
+ _ ->
+ undefined
+ end.
+
+%%-----------------------------------------------------------------
+%% Encapsulate a bplus_tree in a process.
+%%-----------------------------------------------------------------
+
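+%% For illustration, the tree process speaks a small message protocol; a
+%% lookup round-trip looks roughly like
+%%
+%%   Tree ! {lookup, self(), Key},
+%%   receive {bplus_res, Res} -> Res end.
+%%
+%% b_insert/3, b_delete/2 and friends below wrap exactly these messages.
+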
+b_new(MnesiaTab, Us) ->
+ case supervisor:start_child(mnesia_snmp_sup, [MnesiaTab, Us]) of
+ {ok, Tree} ->
+ Tree;
+ {error, Reason} ->
+ exit({badsnmp, MnesiaTab, Reason})
+ end.
+
+start(MnesiaTab, Us) ->
+ Name = {mnesia_snmp, MnesiaTab},
+ mnesia_monitor:start_proc(Name, ?MODULE, b_init, [self(), Us]).
+
+b_insert(Tree, Key, Val) -> Tree ! {insert, Key, Val}.
+b_delete(Tree, Key) -> Tree ! {delete, Key}.
+b_lookup(Tree, Key) ->
+ Tree ! {lookup, self(), Key},
+ receive
+ {bplus_res, Res} ->
+ Res
+ end.
+b_lookup_next(Tree, Key) ->
+ Tree ! {lookup_next, self(), Key},
+ receive
+ {bplus_res, Res} ->
+ Res
+ end.
+
+b_clear(Tree) ->
+ Tree ! clear,
+ ok.
+
+b_init(Parent, Us) ->
+ %% Do not trap exit
+ Tree = snmp_index:new(Us),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ b_loop(Parent, Tree, Us).
+
+b_loop(Parent, Tree, Us) ->
+ receive
+ {insert, Key, Val} ->
+ NTree = snmp_index:insert(Tree, Key, Val),
+ b_loop(Parent, NTree, Us);
+ {delete, Key} ->
+ NTree = snmp_index:delete(Tree, Key),
+ b_loop(Parent, NTree, Us);
+ {lookup, From, Key} ->
+ Res = snmp_index:get(Tree, Key),
+ From ! {bplus_res, Res},
+ b_loop(Parent, Tree, Us);
+ {lookup_next, From, Key} ->
+ Res = snmp_index:get_next(Tree, Key),
+ From ! {bplus_res, Res},
+ b_loop(Parent, Tree, Us);
+ clear ->
+ catch snmp_index:delete(Tree), %% Catch because delete/1 is not
+ NewTree = snmp_index:new(Us), %% available in old snmp (before R5)
+ b_loop(Parent, NewTree, Us);
+
+ {'EXIT', Parent, Reason} ->
+ exit(Reason);
+
+ {system, From, Msg} ->
+ mnesia_lib:dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], {Tree, Us})
+
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(Parent, _Debug, {Tree, Us}) ->
+ b_loop(Parent, Tree, Us).
+
+system_terminate(Reason, _Parent, _Debug, _Tree) ->
+ exit(Reason).
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_sup.erl
new file mode 100644
index 0000000000..1cbac23e9d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_snmp_sup.erl
@@ -0,0 +1,39 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_snmp_sup.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_snmp_sup).
+
+-behaviour(supervisor).
+
+-export([start/0, init/1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% top supervisor callback functions
+
+start() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sub supervisor callback functions
+
+init([]) ->
+ Flags = {simple_one_for_one, 0, timer:hours(24)}, % Trust the top supervisor
+ MFA = {mnesia_snmp_hook, start, []},
+ Modules = [?MODULE, mnesia_snmp_hook, supervisor],
+ KillAfter = mnesia_kernel_sup:supervisor_timeout(timer:seconds(3)),
+ Workers = [{?MODULE, MFA, transient, KillAfter, worker, Modules}],
+ {ok, {Flags, Workers}}.
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sp.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sp.erl
new file mode 100644
index 0000000000..ad29d3cc78
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sp.erl
@@ -0,0 +1,39 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_sp.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+
+%% To be able to generate nice crash reports we need a catch at the highest level.
+%% This code can't be purged, so a code change is not possible.
+%% Hence this is a simple module.
+
+-module(mnesia_sp).
+
+-export([init_proc/4]).
+
+init_proc(Who, Mod, Fun, Args) ->
+ mnesia_lib:verbose("~p starting: ~p~n", [Who, self()]),
+ case catch apply(Mod, Fun, Args) of
+ {'EXIT', Reason} ->
+ mnesia_monitor:terminate_proc(Who, Reason, Args),
+ exit(Reason);
+ Other ->
+ Other
+ end.
+
+
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_subscr.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_subscr.erl
new file mode 100644
index 0000000000..f077291bc6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_subscr.erl
@@ -0,0 +1,492 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_subscr.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_subscr).
+
+-behaviour(gen_server).
+
+-export([start/0,
+ set_debug_level/1,
+ subscribe/2,
+ unsubscribe/2,
+ unsubscribe_table/1,
+ subscribers/0,
+ report_table_event/4,
+ report_table_event/5,
+ report_table_event/6
+ ]).
+
+%% gen_server callbacks
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
+
+-include("mnesia.hrl").
+
+-import(mnesia_lib, [error/2]).
+-record(state, {supervisor, pid_tab}).
+
+start() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [self()],
+ [{timeout, infinity}]).
+
+set_debug_level(Level) ->
+ OldEnv = application:get_env(mnesia, debug),
+ case mnesia_monitor:patch_env(debug, Level) of
+ {error, Reason} ->
+ {error, Reason};
+ NewLevel ->
+ set_debug_level(NewLevel, OldEnv)
+ end.
+
+set_debug_level(Level, OldEnv) ->
+ case mnesia:system_info(is_running) of
+ no when OldEnv == undefined ->
+ none;
+ no ->
+ {ok, E} = OldEnv,
+ E;
+ _ ->
+ Old = mnesia_lib:val(debug),
+ Local = mnesia:system_info(local_tables),
+ E = whereis(mnesia_event),
+ Sub = fun(Tab) -> subscribe(E, {table, Tab}) end,
+ UnSub = fun(Tab) -> unsubscribe(E, {table, Tab}) end,
+
+ case Level of
+ none ->
+ lists:foreach(UnSub, Local);
+ verbose ->
+ lists:foreach(UnSub, Local);
+ debug ->
+ lists:foreach(UnSub, Local -- [schema]),
+ Sub(schema);
+ trace ->
+ lists:foreach(Sub, Local)
+ end,
+ mnesia_lib:set(debug, Level),
+ Old
+ end.
+
+subscribe(ClientPid, system) ->
+ change_subscr(activate, ClientPid, system);
+subscribe(ClientPid, {table, Tab}) ->
+ change_subscr(activate, ClientPid, {table, Tab, simple});
+subscribe(ClientPid, {table, Tab, simple}) ->
+ change_subscr(activate, ClientPid, {table, Tab, simple});
+subscribe(ClientPid, {table, Tab, detailed}) ->
+ change_subscr(activate, ClientPid, {table, Tab, detailed});
+subscribe(_ClientPid, What) ->
+ {error, {badarg, What}}.
+
+unsubscribe(ClientPid, system) ->
+ change_subscr(deactivate, ClientPid, system);
+unsubscribe(ClientPid, {table, Tab}) ->
+ change_subscr(deactivate, ClientPid, {table, Tab, simple});
+unsubscribe(ClientPid, {table, Tab, simple}) ->
+ change_subscr(deactivate, ClientPid, {table, Tab, simple});
+unsubscribe(ClientPid, {table, Tab, detailed}) ->
+ change_subscr(deactivate, ClientPid, {table, Tab, detailed});
+unsubscribe(_ClientPid, What) ->
+ {error, {badarg, What}}.
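+
+%% Illustrative sketch, assuming a hypothetical 'person' table: the clauses
+%% above back the user-level calls mnesia:subscribe({table, person}) and
+%% mnesia:subscribe(system); the {table, Tab, simple | detailed} forms map
+%% onto the same clauses.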
+
+unsubscribe_table(Tab) ->
+ call({change, {deactivate_table, Tab}}).
+
+change_subscr(Kind, ClientPid, What) ->
+ call({change, {Kind, ClientPid, What}}).
+
+subscribers() ->
+ [whereis(mnesia_event) | mnesia_lib:val(subscribers)].
+
+report_table_event(Tab, Tid, Obj, Op) ->
+ case ?catch_val({Tab, commit_work}) of
+ {'EXIT', _} -> ok;
+ Commit ->
+ case lists:keysearch(subscribers, 1, Commit) of
+ false -> ok;
+ {value, Subs} ->
+ report_table_event(Subs, Tab, Tid, Obj, Op, undefined)
+ end
+ end.
+
+%% Backwards compatible for the moment, until mnesia_tm gets updated!
+report_table_event(Subscr, Tab, Tid, Obj, Op) ->
+ report_table_event(Subscr, Tab, Tid, Obj, Op, undefined).
+
+report_table_event({subscribers, S1, S2}, Tab, Tid, _Obj, clear_table, _Old) ->
+ What = {delete, {schema, Tab}, Tid},
+ deliver(S1, {mnesia_table_event, What}),
+ TabDef = mnesia_schema:cs2list(?catch_val({Tab, cstruct})),
+ What2 = {write, {schema, Tab, TabDef}, Tid},
+ deliver(S1, {mnesia_table_event, What2}),
+ What3 = {delete, schema, {schema, Tab}, [{schema, Tab, TabDef}], Tid},
+ deliver(S2, {mnesia_table_event, What3}),
+ What4 = {write, schema, {schema, Tab, TabDef}, [], Tid},
+ deliver(S2, {mnesia_table_event, What4});
+
+report_table_event({subscribers, Subscr, []}, Tab, Tid, Obj, Op, _Old) ->
+ What = {Op, patch_record(Tab, Obj), Tid},
+ deliver(Subscr, {mnesia_table_event, What});
+
+report_table_event({subscribers, S1, S2}, Tab, Tid, Obj, Op, Old) ->
+ Standard = {Op, patch_record(Tab, Obj), Tid},
+ deliver(S1, {mnesia_table_event, Standard}),
+ Extended = what(Tab, Tid, Obj, Op, Old),
+ deliver(S2, Extended);
+
+%% Backwards compatible for the moment, until mnesia_tm gets updated!
+report_table_event({subscribers, Subscr}, Tab, Tid, Obj, Op, Old) ->
+ report_table_event({subscribers, Subscr, []}, Tab, Tid, Obj, Op, Old).
+
+
+patch_record(Tab, Obj) ->
+ case Tab == element(1, Obj) of
+ true ->
+ Obj;
+ false ->
+ setelement(1, Obj, Tab)
+ end.
+
+what(Tab, Tid, {RecName, Key}, delete, undefined) ->
+ case catch mnesia_lib:db_get(Tab, Key) of
+ Old when list(Old) -> %% Op only allowed for set table.
+ {mnesia_table_event, {delete, Tab, {RecName, Key}, Old, Tid}};
+ _ ->
+ %% Record just deleted by a dirty_op or
+ %% the whole table has been deleted
+ ignore
+ end;
+what(Tab, Tid, Obj, delete, Old) ->
+ {mnesia_table_event, {delete, Tab, Obj, Old, Tid}};
+what(Tab, Tid, Obj, delete_object, _Old) ->
+ {mnesia_table_event, {delete, Tab, Obj, [Obj], Tid}};
+what(Tab, Tid, Obj, write, undefined) ->
+ case catch mnesia_lib:db_get(Tab, element(2, Obj)) of
+ Old when list(Old) ->
+ {mnesia_table_event, {write, Tab, Obj, Old, Tid}};
+ {'EXIT', _} ->
+ ignore
+ end.
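+
+%% For illustration, a subscriber to detailed table events receives messages
+%% shaped like
+%%
+%%   {mnesia_table_event, {write, Tab, NewRecord, OldRecords, ActivityId}}
+%%
+%% which is exactly what the clauses above construct.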
+
+deliver(_, ignore) ->
+ ok;
+deliver([Pid | Pids], Msg) ->
+ Pid ! Msg,
+ deliver(Pids, Msg);
+deliver([], _Msg) ->
+ ok.
+
+call(Msg) ->
+ Pid = whereis(?MODULE),
+ case Pid of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ Res = gen_server:call(Pid, Msg, infinity),
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', _Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Callback functions from gen_server
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ ClientPid = whereis(mnesia_event),
+ link(ClientPid),
+ mnesia_lib:verbose("~p starting: ~p~n", [?MODULE, self()]),
+ Tab = ?ets_new_table(mnesia_subscr, [duplicate_bag, private]),
+ ?ets_insert(Tab, {ClientPid, system}),
+ {ok, #state{supervisor = Parent, pid_tab = Tab}}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%%----------------------------------------------------------------------
+handle_call({change, How}, _From, State) ->
+ Reply = do_change(How, State#state.pid_tab),
+ {reply, Reply, State};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_info({'EXIT', Pid, _R}, State) when Pid == State#state.supervisor ->
+ {stop, shutdown, State};
+
+handle_info({'EXIT', Pid, _Reason}, State) ->
+ handle_exit(Pid, State#state.pid_tab),
+ {noreply, State};
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+terminate(Reason, State) ->
+ prepare_stop(State#state.pid_tab),
+ mnesia_monitor:terminate_proc(?MODULE, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+do_change({activate, ClientPid, system}, SubscrTab) when pid(ClientPid) ->
+ Var = subscribers,
+ activate(ClientPid, system, Var, subscribers(), SubscrTab);
+do_change({activate, ClientPid, {table, Tab, How}}, SubscrTab) when pid(ClientPid) ->
+ case ?catch_val({Tab, where_to_read}) of
+ Node when Node == node() ->
+ Var = {Tab, commit_work},
+ activate(ClientPid, {table, Tab, How}, Var, mnesia_lib:val(Var), SubscrTab);
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ _Node ->
+ {error, {not_active_local, Tab}}
+ end;
+do_change({deactivate, ClientPid, system}, SubscrTab) ->
+ Var = subscribers,
+ deactivate(ClientPid, system, Var, SubscrTab);
+do_change({deactivate, ClientPid, {table, Tab, How}}, SubscrTab) ->
+ Var = {Tab, commit_work},
+ deactivate(ClientPid, {table, Tab, How}, Var, SubscrTab);
+do_change({deactivate_table, Tab}, SubscrTab) ->
+ Var = {Tab, commit_work},
+ case ?catch_val(Var) of
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ CommitWork ->
+ case lists:keysearch(subscribers, 1, CommitWork) of
+ false ->
+ ok;
+ {value, Subs} ->
+ Simple = {table, Tab, simple},
+ Detailed = {table, Tab, detailed},
+ Fs = fun(C) -> deactivate(C, Simple, Var, SubscrTab) end,
+ Fd = fun(C) -> deactivate(C, Detailed, Var, SubscrTab) end,
+ case Subs of
+ {subscribers, L1, L2} ->
+ lists:foreach(Fs, L1),
+ lists:foreach(Fd, L2);
+ {subscribers, L1} ->
+ lists:foreach(Fs, L1)
+ end
+ end,
+ {ok, node()}
+ end;
+do_change(_, _) ->
+ {error, badarg}.
+
+activate(ClientPid, What, Var, OldSubscribers, SubscrTab) ->
+ Old =
+ if Var == subscribers ->
+ OldSubscribers;
+ true ->
+ case lists:keysearch(subscribers, 1, OldSubscribers) of
+ false -> [];
+ {value, Subs} ->
+ case Subs of
+ {subscribers, L1, L2} ->
+ L1 ++ L2;
+ {subscribers, L1} ->
+ L1
+ end
+ end
+ end,
+ case lists:member(ClientPid, Old) of
+ false ->
+ %% Don't care about checking old links
+ case catch link(ClientPid) of
+ true ->
+ ?ets_insert(SubscrTab, {ClientPid, What}),
+ add_subscr(Var, What, ClientPid),
+ {ok, node()};
+ {'EXIT', _Reason} ->
+ {error, {no_exists, ClientPid}}
+ end;
+ true ->
+ {error, {already_exists, What}}
+ end.
+
+%%-record(subscribers, {pids = []}). Old subscriber record removed
+%% To preserve backward compatibility, this code is a kludge.
+add_subscr(subscribers, _What, Pid) ->
+ mnesia_lib:add(subscribers, Pid),
+ {ok, node()};
+add_subscr({Tab, commit_work}, What, Pid) ->
+ Commit = mnesia_lib:val({Tab, commit_work}),
+ case lists:keysearch(subscribers, 1, Commit) of
+ false ->
+ Subscr =
+ case What of
+ {table, _, simple} ->
+ {subscribers, [Pid], []};
+ {table, _, detailed} ->
+ {subscribers, [], [Pid]}
+ end,
+ mnesia_lib:add({Tab, subscribers}, Pid),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit([Subscr | Commit]));
+ {value, Old} ->
+ {L1, L2} =
+ case Old of
+ {subscribers, L} -> %% Old Way
+ {L, []};
+ {subscribers, SL1, SL2} ->
+ {SL1, SL2}
+ end,
+ Subscr =
+ case What of
+ {table, _, simple} ->
+ {subscribers, [Pid | L1], L2};
+ {table, _, detailed} ->
+ {subscribers, L1, [Pid | L2]}
+ end,
+ NewC = lists:keyreplace(subscribers, 1, Commit, Subscr),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC)),
+ mnesia_lib:add({Tab, subscribers}, Pid)
+ end.
+
+deactivate(ClientPid, What, Var, SubscrTab) ->
+ ?ets_match_delete(SubscrTab, {ClientPid, What}),
+ case catch ?ets_lookup_element(SubscrTab, ClientPid, 1) of
+ List when list(List) ->
+ ignore;
+ {'EXIT', _} ->
+ unlink(ClientPid)
+ end,
+ del_subscr(Var, What, ClientPid),
+ {ok, node()}.
+
+del_subscr(subscribers, _What, Pid) ->
+ mnesia_lib:del(subscribers, Pid);
+del_subscr({Tab, commit_work}, What, Pid) ->
+ Commit = mnesia_lib:val({Tab, commit_work}),
+ case lists:keysearch(subscribers, 1, Commit) of
+ false ->
+ false;
+ {value, Old} ->
+ {L1, L2} =
+ case Old of
+ {subscribers, L} -> %% Old Way
+ {L, []};
+ {subscribers, SL1, SL2} ->
+ {SL1, SL2}
+ end,
+ Subscr =
+ case What of %% Ignore user error delete subscr from any list
+ {table, _, simple} ->
+ NewL1 = lists:delete(Pid, L1),
+ NewL2 = lists:delete(Pid, L2),
+ {subscribers, NewL1, NewL2};
+ {table, _, detailed} ->
+ NewL1 = lists:delete(Pid, L1),
+ NewL2 = lists:delete(Pid, L2),
+ {subscribers, NewL1, NewL2}
+ end,
+ case Subscr of
+ {subscribers, [], []} ->
+ NewC = lists:keydelete(subscribers, 1, Commit),
+ mnesia_lib:del({Tab, subscribers}, Pid),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC));
+ _ ->
+ NewC = lists:keyreplace(subscribers, 1, Commit, Subscr),
+ mnesia_lib:del({Tab, subscribers}, Pid),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC))
+ end
+ end.
+
+handle_exit(ClientPid, SubscrTab) ->
+ do_handle_exit(?ets_lookup(SubscrTab, ClientPid)),
+ ?ets_delete(SubscrTab, ClientPid).
+
+do_handle_exit([{ClientPid, What} | Tail]) ->
+ case What of
+ system ->
+ del_subscr(subscribers, What, ClientPid);
+ {_, Tab, _Level} ->
+ del_subscr({Tab, commit_work}, What, ClientPid)
+ end,
+ do_handle_exit(Tail);
+do_handle_exit([]) ->
+ ok.
+
+prepare_stop(SubscrTab) ->
+ mnesia_lib:report_system_event({mnesia_down, node()}),
+ do_prepare_stop(?ets_first(SubscrTab), SubscrTab).
+
+do_prepare_stop('$end_of_table', _SubscrTab) ->
+ ok;
+do_prepare_stop(ClientPid, SubscrTab) ->
+ Next = ?ets_next(SubscrTab, ClientPid),
+ handle_exit(ClientPid, SubscrTab),
+ unlink(ClientPid),
+ do_prepare_stop(Next, SubscrTab).
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sup.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sup.erl
new file mode 100644
index 0000000000..a8a1df885f
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_sup.erl
@@ -0,0 +1,137 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_sup.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+%% Supervisor for the entire Mnesia application
+
+-module(mnesia_sup).
+
+-behaviour(application).
+-behaviour(supervisor).
+
+-export([start/0, start/2, init/1, stop/1, start_event/0, kill/0]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% application and supervisor callback functions
+
+start(normal, Args) ->
+ SupName = {local,?MODULE},
+ case supervisor:start_link(SupName, ?MODULE, [Args]) of
+ {ok, Pid} ->
+ {ok, Pid, {normal, Args}};
+ Error ->
+ Error
+ end;
+start(_, _) ->
+ {error, badarg}.
+
+start() ->
+ SupName = {local,?MODULE},
+ supervisor:start_link(SupName, ?MODULE, []).
+
+stop(_StartArgs) ->
+ ok.
+
+init([]) -> % Supervisor
+ init();
+init([[]]) -> % Application
+ init();
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+init() ->
+ Flags = {one_for_all, 0, 3600}, % Should be rest_for_one policy
+
+ Event = event_procs(),
+ Kernel = kernel_procs(),
+ Mnemosyne = mnemosyne_procs(),
+
+ {ok, {Flags, Event ++ Kernel ++ Mnemosyne}}.
+
+event_procs() ->
+ KillAfter = timer:seconds(30),
+ KA = mnesia_kernel_sup:supervisor_timeout(KillAfter),
+ E = mnesia_event,
+ [{E, {?MODULE, start_event, []}, permanent, KA, worker, [E, gen_event]}].
+
+kernel_procs() ->
+ K = mnesia_kernel_sup,
+ KA = infinity,
+ [{K, {K, start, []}, permanent, KA, supervisor, [K, supervisor]}].
+
+mnemosyne_procs() ->
+ case mnesia_monitor:get_env(embedded_mnemosyne) of
+ true ->
+ Q = mnemosyne_sup,
+ KA = infinity,
+ [{Q, {Q, start, []}, permanent, KA, supervisor, [Q, supervisor]}];
+ false ->
+ []
+ end.
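+
+%% For reference, the child specifications returned above use the classic
+%% supervisor format {Id, {M, F, A}, Restart, Shutdown, Type, Modules}. A
+%% minimal sketch for a hypothetical worker module my_worker (not part of
+%% mnesia, shown only to make the tuples above easier to read):
+example_child_spec() ->
+    {my_worker, {my_worker, start_link, []},
+     permanent, timer:seconds(5), worker, [my_worker]}.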
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% event handler
+
+start_event() ->
+ case gen_event:start_link({local, mnesia_event}) of
+ {ok, Pid} ->
+ case add_event_handler() of
+ ok ->
+ {ok, Pid};
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+add_event_handler() ->
+ Handler = mnesia_monitor:get_env(event_module),
+ gen_event:add_handler(mnesia_event, Handler, []).
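+
+%% Hedged sketch: the handler picked up by add_event_handler/0 comes from
+%% mnesia's 'event_module' application parameter. Assuming the user supplies
+%% a gen_event callback module (my_event_handler below is purely
+%% hypothetical), it could be configured before mnesia is started like this:
+example_set_event_module() ->
+    application:set_env(mnesia, event_module, my_event_handler).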
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% debug functions
+
+kill() ->
+ Mnesia = [mnesia_fallback | mnesia:ms()],
+ Mnemosyne = mnemosyne_ms(),
+ Kill = fun(Name) -> catch exit(whereis(Name), kill) end,
+ lists:foreach(Kill, Mnemosyne),
+ lists:foreach(Kill, Mnesia),
+ lists:foreach(fun ensure_dead/1, Mnemosyne),
+ lists:foreach(fun ensure_dead/1, Mnesia),
+ timer:sleep(10),
+ case lists:keymember(mnesia, 1, application:which_applications()) of
+ true -> kill();
+ false -> ok
+ end.
+
+ensure_dead(Name) ->
+ case whereis(Name) of
+ undefined ->
+ ok;
+ Pid when pid(Pid) ->
+ exit(Pid, kill),
+ timer:sleep(10),
+ ensure_dead(Name)
+ end.
+
+mnemosyne_ms() ->
+ case mnesia_monitor:get_env(embedded_mnemosyne) of
+ true -> mnemosyne:ms();
+ false -> []
+ end.
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_text.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_text.erl
new file mode 100644
index 0000000000..e6084efbb1
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_text.erl
@@ -0,0 +1,191 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_text.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+-module(mnesia_text).
+
+-export([parse/1, file/1, load_textfile/1, dump_to_textfile/1]).
+
+load_textfile(File) ->
+ ensure_started(),
+ case parse(File) of
+ {ok, {Tabs, Data}} ->
+ Badtabs = make_tabs(lists:map(fun validate_tab/1, Tabs)),
+ load_data(del_data(Badtabs, Data, []));
+ Other ->
+ Other
+ end.
+
+dump_to_textfile(File) ->
+ dump_to_textfile(mnesia_lib:is_running(), file:open(File, [write])).
+dump_to_textfile(yes, {ok, F}) ->
+ Tabs = lists:delete(schema, mnesia_lib:local_active_tables()),
+ Defs = lists:map(fun(T) -> {T, [{record_name, mnesia_lib:val({T, record_name})},
+ {attributes, mnesia_lib:val({T, attributes})}]}
+ end,
+ Tabs),
+ io:format(F, "~p.~n", [{tables, Defs}]),
+ lists:foreach(fun(T) -> dump_tab(F, T) end, Tabs),
+ file:close(F);
+dump_to_textfile(_,_) -> error.
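+
+%% A hedged usage sketch (not called by mnesia): dump the local tables to a
+%% text file and read them back in. The file written above starts with a
+%% {tables, Defs} term followed by one record tuple per line, which is the
+%% layout that parse/1 and collect/1 below expect.
+example_text_round_trip(File) ->
+    case dump_to_textfile(File) of
+        error -> error;
+        _ -> load_textfile(File)
+    end.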
+
+
+dump_tab(F, T) ->
+ W = mnesia_lib:val({T, wild_pattern}),
+ {'atomic',All} = mnesia:transaction(fun() -> mnesia:match_object(T, W, read) end),
+ lists:foreach(fun(Term) -> io:format(F,"~p.~n", [setelement(1, Term, T)]) end, All).
+
+
+ensure_started() ->
+ case mnesia_lib:is_running() of
+ yes ->
+ yes;
+ no ->
+ case mnesia_lib:exists(mnesia_lib:dir("schema.DAT")) of
+ true ->
+ mnesia:start();
+ false ->
+ mnesia:create_schema([node()]),
+ mnesia:start()
+ end
+ end.
+
+del_data(Bad, [H|T], Ack) ->
+ case lists:member(element(1, H), Bad) of
+ true -> del_data(Bad, T, Ack);
+ false -> del_data(Bad, T, [H|Ack])
+ end;
+del_data(_Bad, [], Ack) ->
+ lists:reverse(Ack).
+
+%% This is the place to call the validate function in mnesia_schema
+validate_tab({Tabname, List}) ->
+ {Tabname, List};
+validate_tab({Tabname, RecName, List}) ->
+ {Tabname, RecName, List};
+validate_tab(_) -> error(badtab).
+
+make_tabs([{Tab, Def} | Tail]) ->
+ case catch mnesia:table_info(Tab, where_to_read) of
+ {'EXIT', _} -> %% non-existing table
+ case mnesia:create_table(Tab, Def) of
+ {aborted, Reason} ->
+ io:format("** Failed to create table ~w ~n"
+ "** Reason = ~w, Args = ~p~n",
+ [Tab, Reason, Def]),
+ [Tab | make_tabs(Tail)];
+ _ ->
+ io:format("New table ~w~n", [Tab]),
+ make_tabs(Tail)
+ end;
+ Node ->
+ io:format("** Table ~w already exists on ~p, just entering data~n",
+ [Tab, Node]),
+ make_tabs(Tail)
+ end;
+
+make_tabs([]) ->
+ [].
+
+load_data(L) ->
+ mnesia:transaction(fun() ->
+ F = fun(X) ->
+ Tab = element(1, X),
+ RN = mnesia:table_info(Tab, record_name),
+ Rec = setelement(1, X, RN),
+ mnesia:write(Tab, Rec, write) end,
+ lists:foreach(F, L)
+ end).
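+
+%% Illustrative only: the terms a text file is expected to contain, using a
+%% made-up table 'person' with fields [name, age]. The first term names the
+%% tables (see collect/1 below); each following term is a plain record whose
+%% first element is one of those table names.
+example_textfile_terms() ->
+    [{tables, [{person, [{attributes, [name, age]}]}]},
+     {person, "alice", 30},
+     {person, "bob", 25}].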
+
+parse(File) ->
+ case file(File) of
+ {ok, Terms} ->
+ case catch collect(Terms) of
+ {error, X} ->
+ {error, X};
+ Other ->
+ {ok, Other}
+ end;
+ Other ->
+ Other
+ end.
+
+collect([{_, {tables, Tabs}}|L]) ->
+ {Tabs, collect_data(Tabs, L)};
+
+collect(_) ->
+ io:format("No tables found\n", []),
+ error(bad_header).
+
+collect_data(Tabs, [{Line, Term} | Tail]) when tuple(Term) ->
+ case lists:keysearch(element(1, Term), 1, Tabs) of
+ {value, _} ->
+ [Term | collect_data(Tabs, Tail)];
+ _Other ->
+ io:format("Object:~p at line ~w unknown\n", [Term,Line]),
+ error(undefined_object)
+ end;
+collect_data(_Tabs, []) -> [];
+collect_data(_Tabs, [H|_T]) ->
+ io:format("Object:~p unknown\n", [H]),
+ error(undefined_object).
+
+error(What) -> throw({error, What}).
+
+file(File) ->
+ case file:open(File, [read]) of
+ {ok, Stream} ->
+ Res = read_terms(Stream, File, 1, []),
+ file:close(Stream),
+ Res;
+ _Other ->
+ {error, open}
+ end.
+
+read_terms(Stream, File, Line, L) ->
+ case read_term_from_stream(Stream, File, Line) of
+ {ok, Term, NextLine} ->
+ read_terms(Stream, File, NextLine, [Term|L]);
+ error ->
+ {error, read};
+ eof ->
+ {ok, lists:reverse(L)}
+ end.
+
+read_term_from_stream(Stream, File, Line) ->
+ R = io:request(Stream, {get_until,'',erl_scan,tokens,[Line]}),
+ case R of
+ {ok,Toks,EndLine} ->
+ case erl_parse:parse_term(Toks) of
+ {ok, Term} ->
+ {ok, {Line, Term}, EndLine};
+ {error, {NewLine,Mod,What}} ->
+ Str = Mod:format_error(What),
+ io:format("Error in line:~p of:~p ~s\n",
+ [NewLine, File, Str]),
+ error;
+ T ->
+ io:format("Error2 **~p~n",[T]),
+ error
+ end;
+ {eof,_EndLine} ->
+ eof;
+ Other ->
+ io:format("Error1 **~p~n",[Other]),
+ error
+ end.
+
+
diff --git a/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_tm.erl
new file mode 100644
index 0000000000..7bee382a89
--- /dev/null
+++ b/lib/dialyzer/test/r9c_tests_SUITE_data/src/mnesia/mnesia_tm.erl
@@ -0,0 +1,2173 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_tm.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+-module(mnesia_tm).
+
+-export([
+ start/0,
+ init/1,
+ non_transaction/5,
+ transaction/6,
+ commit_participant/5,
+ dirty/2,
+ display_info/2,
+ do_update_op/3,
+ get_info/1,
+ get_transactions/0,
+ info/1,
+ mnesia_down/1,
+ prepare_checkpoint/2,
+ prepare_checkpoint/1, % Internal
+ prepare_snmp/3,
+ do_snmp/2,
+ put_activity_id/1,
+ block_tab/1,
+ unblock_tab/1
+ ]).
+
+%% sys callback functions
+-export([system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [set/2]).
+-import(mnesia_lib, [fatal/2, verbose/2, dbg_out/2]).
+
+-record(state, {coordinators = [], participants = [], supervisor,
+ blocked_tabs = [], dirty_queue = []}).
+%% The format of the coordinators list is [{Tid, EtsTabList}, ...]
+
+-record(prep, {protocol = sym_trans,
+ %% async_dirty | sync_dirty | sym_trans | sync_sym_trans | asym_trans
+ records = [],
+               prev_tab = [], % initialized to an invalid table name
+ prev_types,
+ prev_snmp,
+ types
+ }).
+
+-record(participant, {tid, pid, commit, disc_nodes = [],
+ ram_nodes = [], protocol = sym_trans}).
+
+start() ->
+ mnesia_monitor:start_proc(?MODULE, ?MODULE, init, [self()]).
+
+init(Parent) ->
+ register(?MODULE, self()),
+ process_flag(trap_exit, true),
+
+ %% Initialize the schema
+ IgnoreFallback = mnesia_monitor:get_env(ignore_fallback_at_startup),
+ mnesia_bup:tm_fallback_start(IgnoreFallback),
+ mnesia_schema:init(IgnoreFallback),
+
+ %% Handshake and initialize transaction recovery
+ mnesia_recover:init(),
+ Early = mnesia_monitor:init(),
+ AllOthers = mnesia_lib:uniq(Early ++ mnesia_lib:all_nodes()) -- [node()],
+ set(original_nodes, AllOthers),
+ mnesia_recover:connect_nodes(AllOthers),
+
+ %% Recover transactions, may wait for decision
+ case mnesia_monitor:use_dir() of
+ true ->
+ P = mnesia_dumper:opt_dump_log(startup), % previous log
+ L = mnesia_dumper:opt_dump_log(startup), % latest log
+ Msg = "Initial dump of log during startup: ~p~n",
+ mnesia_lib:verbose(Msg, [[P, L]]),
+ mnesia_log:init();
+ false ->
+ ignore
+ end,
+
+ mnesia_schema:purge_tmp_files(),
+ mnesia_recover:start_garb(),
+
+ ?eval_debug_fun({?MODULE, init}, [{nodes, AllOthers}]),
+
+ case val(debug) of
+ Debug when Debug /= debug, Debug /= trace ->
+ ignore;
+ _ ->
+ mnesia_subscr:subscribe(whereis(mnesia_event), {table, schema})
+ end,
+ proc_lib:init_ack(Parent, {ok, self()}),
+ doit_loop(#state{supervisor = Parent}).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+reply({From,Ref}, R) ->
+ From ! {?MODULE, Ref, R};
+reply(From, R) ->
+ From ! {?MODULE, node(), R}.
+
+reply(From, R, State) ->
+ reply(From, R),
+ doit_loop(State).
+
+req(R) ->
+ case whereis(?MODULE) of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ Ref = make_ref(),
+ Pid ! {{self(), Ref}, R},
+ rec(Pid, Ref)
+ end.
+
+rec() ->
+ rec(whereis(?MODULE)).
+
+rec(Pid) when pid(Pid) ->
+ receive
+ {?MODULE, _, Reply} ->
+ Reply;
+
+ {'EXIT', Pid, _} ->
+ {error, {node_not_running, node()}}
+ end;
+rec(undefined) ->
+ {error, {node_not_running, node()}}.
+
+rec(Pid, Ref) ->
+ receive
+ {?MODULE, Ref, Reply} ->
+ Reply;
+ {'EXIT', Pid, _} ->
+ {error, {node_not_running, node()}}
+ end.
+
+tmlink({From, Ref}) when reference(Ref) ->
+ link(From);
+tmlink(From) ->
+ link(From).
+tmpid({Pid, _Ref}) when pid(Pid) ->
+ Pid;
+tmpid(Pid) ->
+ Pid.
+
+%% Returns a list of participant transaction Tid's
+mnesia_down(Node) ->
+    %% A synchronous call is needed in order to avoid
+ %% race with mnesia_tm's coordinator processes
+ %% that may restart and acquire new locks.
+ %% mnesia_monitor takes care of the sync
+ case whereis(?MODULE) of
+ undefined ->
+ mnesia_monitor:mnesia_down(?MODULE, {Node, []});
+ Pid ->
+ Pid ! {mnesia_down, Node}
+ end.
+
+prepare_checkpoint(Nodes, Cp) ->
+ rpc:multicall(Nodes, ?MODULE, prepare_checkpoint, [Cp]).
+
+prepare_checkpoint(Cp) ->
+ req({prepare_checkpoint,Cp}).
+
+block_tab(Tab) ->
+ req({block_tab, Tab}).
+
+unblock_tab(Tab) ->
+ req({unblock_tab, Tab}).
+
+doit_loop(#state{coordinators = Coordinators, participants = Participants, supervisor = Sup}
+ = State) ->
+ receive
+ {_From, {async_dirty, Tid, Commit, Tab}} ->
+ case lists:member(Tab, State#state.blocked_tabs) of
+ false ->
+ do_async_dirty(Tid, Commit, Tab),
+ doit_loop(State);
+ true ->
+ Item = {async_dirty, Tid, Commit, Tab},
+ State2 = State#state{dirty_queue = [Item | State#state.dirty_queue]},
+ doit_loop(State2)
+ end;
+
+ {From, {sync_dirty, Tid, Commit, Tab}} ->
+ case lists:member(Tab, State#state.blocked_tabs) of
+ false ->
+ do_sync_dirty(From, Tid, Commit, Tab),
+ doit_loop(State);
+ true ->
+ Item = {sync_dirty, From, Tid, Commit, Tab},
+ State2 = State#state{dirty_queue = [Item | State#state.dirty_queue]},
+ doit_loop(State2)
+ end;
+
+ {From, start_outer} -> %% Create and associate ets_tab with Tid
+ case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table for the "
+ "local transaction store",
+ reply(From, {error, {system_limit, Msg, Reason}}, State);
+ Etab ->
+ tmlink(From),
+ C = mnesia_recover:incr_trans_tid_serial(),
+ ?ets_insert(Etab, {nodes, node()}),
+ Tid = #tid{pid = tmpid(From), counter = C},
+ A2 = [{Tid , [Etab]} | Coordinators],
+ S2 = State#state{coordinators = A2},
+ reply(From, {new_tid, Tid, Etab}, S2)
+ end;
+
+ {From, {ask_commit, Protocol, Tid, Commit, DiscNs, RamNs}} ->
+ ?eval_debug_fun({?MODULE, doit_ask_commit},
+ [{tid, Tid}, {prot, Protocol}]),
+ mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ Pid =
+ case Protocol of
+ asym_trans when node(Tid#tid.pid) /= node() ->
+ Args = [tmpid(From), Tid, Commit, DiscNs, RamNs],
+ spawn_link(?MODULE, commit_participant, Args);
+ _ when node(Tid#tid.pid) /= node() -> %% *_sym_trans
+ reply(From, {vote_yes, Tid}),
+ nopid
+ end,
+ P = #participant{tid = Tid,
+ pid = Pid,
+ commit = Commit,
+ disc_nodes = DiscNs,
+ ram_nodes = RamNs,
+ protocol = Protocol},
+ State2 = State#state{participants = [P | Participants]},
+ doit_loop(State2);
+
+ {Tid, do_commit} ->
+ case mnesia_lib:key_search_delete(Tid, #participant.tid, Participants) of
+ {none, _} ->
+ verbose("Tried to commit a non participant transaction ~p~n",
+ [Tid]),
+ doit_loop(State);
+ {P, Participants2} ->
+ ?eval_debug_fun({?MODULE, do_commit, pre},
+ [{tid, Tid}, {participant, P}]),
+ case P#participant.pid of
+ nopid ->
+ Commit = P#participant.commit,
+ Member = lists:member(node(), P#participant.disc_nodes),
+ if Member == false ->
+ ignore;
+ P#participant.protocol == sym_trans ->
+ mnesia_log:log(Commit);
+ P#participant.protocol == sync_sym_trans ->
+ mnesia_log:slog(Commit)
+ end,
+ mnesia_recover:note_decision(Tid, committed),
+ do_commit(Tid, Commit),
+ if
+ P#participant.protocol == sync_sym_trans ->
+ Tid#tid.pid ! {?MODULE, node(), {committed, Tid}};
+ true ->
+ ignore
+ end,
+ mnesia_locker:release_tid(Tid),
+ transaction_terminated(Tid),
+ ?eval_debug_fun({?MODULE, do_commit, post}, [{tid, Tid}, {pid, nopid}]),
+ doit_loop(State#state{participants = Participants2});
+ Pid when pid(Pid) ->
+ Pid ! {Tid, committed},
+ ?eval_debug_fun({?MODULE, do_commit, post}, [{tid, Tid}, {pid, Pid}]),
+ doit_loop(State)
+ end
+ end;
+
+ {Tid, simple_commit} ->
+ mnesia_recover:note_decision(Tid, committed),
+ mnesia_locker:release_tid(Tid),
+ transaction_terminated(Tid),
+ doit_loop(State);
+
+ {Tid, {do_abort, Reason}} ->
+ ?eval_debug_fun({?MODULE, do_abort, pre}, [{tid, Tid}]),
+ mnesia_locker:release_tid(Tid),
+ case mnesia_lib:key_search_delete(Tid, #participant.tid, Participants) of
+ {none, _} ->
+ verbose("Tried to abort a non participant transaction ~p: ~p~n",
+ [Tid, Reason]),
+ doit_loop(State);
+ {P, Participants2} ->
+ case P#participant.pid of
+ nopid ->
+ Commit = P#participant.commit,
+ mnesia_recover:note_decision(Tid, aborted),
+ do_abort(Tid, Commit),
+ if
+ P#participant.protocol == sync_sym_trans ->
+ Tid#tid.pid ! {?MODULE, node(), {aborted, Tid}};
+ true ->
+ ignore
+ end,
+ transaction_terminated(Tid),
+ ?eval_debug_fun({?MODULE, do_abort, post}, [{tid, Tid}, {pid, nopid}]),
+ doit_loop(State#state{participants = Participants2});
+ Pid when pid(Pid) ->
+ Pid ! {Tid, {do_abort, Reason}},
+ ?eval_debug_fun({?MODULE, do_abort, post},
+ [{tid, Tid}, {pid, Pid}]),
+ doit_loop(State)
+ end
+ end;
+
+ {From, {add_store, Tid}} -> %% new store for nested transaction
+ case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table for a nested "
+ "local transaction store",
+ reply(From, {error, {system_limit, Msg, Reason}}, State);
+ Etab ->
+ A2 = add_coord_store(Coordinators, Tid, Etab),
+ reply(From, {new_store, Etab},
+ State#state{coordinators = A2})
+ end;
+
+ {From, {del_store, Tid, Current, Obsolete, PropagateStore}} ->
+ opt_propagate_store(Current, Obsolete, PropagateStore),
+ A2 = del_coord_store(Coordinators, Tid, Current, Obsolete),
+ reply(From, store_erased, State#state{coordinators = A2});
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason, State);
+
+ {From, {restart, Tid, Store}} ->
+ A2 = restore_stores(Coordinators, Tid, Store),
+ ?ets_match_delete(Store, '_'),
+ ?ets_insert(Store, {nodes, node()}),
+ reply(From, {restarted, Tid}, State#state{coordinators = A2});
+
+ {delete_transaction, Tid} ->
+ %% used to clear transactions which are committed
+ %% in coordinator or participant processes
+ case mnesia_lib:key_search_delete(Tid, #participant.tid, Participants) of
+ {none, _} ->
+ case mnesia_lib:key_search_delete(Tid, 1, Coordinators) of
+ {none, _} ->
+ verbose("** ERROR ** Tried to delete a non transaction ~p~n",
+ [Tid]),
+ doit_loop(State);
+ {{_Tid, Etabs}, A2} ->
+ erase_ets_tabs(Etabs),
+ transaction_terminated(Tid),
+ doit_loop(State#state{coordinators = A2})
+ end;
+ {_P, Participants2} ->
+ transaction_terminated(Tid),
+ State2 = State#state{participants = Participants2},
+ doit_loop(State2)
+ end;
+
+ {sync_trans_serial, Tid} ->
+ %% Do the Lamport thing here
+ mnesia_recover:sync_trans_tid_serial(Tid),
+ doit_loop(State);
+
+ {From, info} ->
+ reply(From, {info, Participants, Coordinators}, State);
+
+ {mnesia_down, N} ->
+ verbose("Got mnesia_down from ~p, reconfiguring...~n", [N]),
+ reconfigure_coordinators(N, Coordinators),
+
+ Tids = [P#participant.tid || P <- Participants],
+ reconfigure_participants(N, Participants),
+ mnesia_monitor:mnesia_down(?MODULE, {N, Tids}),
+ doit_loop(State);
+
+ {From, {unblock_me, Tab}} ->
+ case lists:member(Tab, State#state.blocked_tabs) of
+ false ->
+ verbose("Wrong dirty Op blocked on ~p ~p ~p",
+ [node(), Tab, From]),
+ reply(From, unblocked),
+ doit_loop(State);
+ true ->
+ Item = {Tab, unblock_me, From},
+ State2 = State#state{dirty_queue = [Item | State#state.dirty_queue]},
+ doit_loop(State2)
+ end;
+
+ {From, {block_tab, Tab}} ->
+ State2 = State#state{blocked_tabs = [Tab | State#state.blocked_tabs]},
+ reply(From, ok, State2);
+
+ {From, {unblock_tab, Tab}} ->
+ BlockedTabs2 = State#state.blocked_tabs -- [Tab],
+ case lists:member(Tab, BlockedTabs2) of
+ false ->
+ mnesia_controller:unblock_table(Tab),
+ Queue = process_dirty_queue(Tab, State#state.dirty_queue),
+ State2 = State#state{blocked_tabs = BlockedTabs2,
+ dirty_queue = Queue},
+ reply(From, ok, State2);
+ true ->
+ State2 = State#state{blocked_tabs = BlockedTabs2},
+ reply(From, ok, State2)
+ end;
+
+ {From, {prepare_checkpoint, Cp}} ->
+ Res = mnesia_checkpoint:tm_prepare(Cp),
+ case Res of
+ {ok, _Name, IgnoreNew, _Node} ->
+ prepare_pending_coordinators(Coordinators, IgnoreNew),
+ prepare_pending_participants(Participants, IgnoreNew);
+ {error, _Reason} ->
+ ignore
+ end,
+ reply(From, Res, State);
+
+ {system, From, Msg} ->
+ dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ sys:handle_system_msg(Msg, From, Sup, ?MODULE, [], State);
+
+ Msg ->
+ verbose("** ERROR ** ~p got unexpected message: ~p~n", [?MODULE, Msg]),
+ doit_loop(State)
+ end.
+
+do_sync_dirty(From, Tid, Commit, _Tab) ->
+ ?eval_debug_fun({?MODULE, sync_dirty, pre}, [{tid, Tid}]),
+ Res = (catch do_dirty(Tid, Commit)),
+ ?eval_debug_fun({?MODULE, sync_dirty, post}, [{tid, Tid}]),
+ From ! {?MODULE, node(), {dirty_res, Res}}.
+
+do_async_dirty(Tid, Commit, _Tab) ->
+ ?eval_debug_fun({?MODULE, async_dirty, pre}, [{tid, Tid}]),
+ catch do_dirty(Tid, Commit),
+ ?eval_debug_fun({?MODULE, async_dirty, post}, [{tid, Tid}]).
+
+%% Process items in fifo order
+process_dirty_queue(Tab, [Item | Queue]) ->
+ Queue2 = process_dirty_queue(Tab, Queue),
+ case Item of
+ {async_dirty, Tid, Commit, Tab} ->
+ do_async_dirty(Tid, Commit, Tab),
+ Queue2;
+ {sync_dirty, From, Tid, Commit, Tab} ->
+ do_sync_dirty(From, Tid, Commit, Tab),
+ Queue2;
+ {Tab, unblock_me, From} ->
+ reply(From, unblocked),
+ Queue2;
+ _ ->
+ [Item | Queue2]
+ end;
+process_dirty_queue(_Tab, []) ->
+ [].
+
+prepare_pending_coordinators([{Tid, [Store | _Etabs]} | Coords], IgnoreNew) ->
+ case catch ?ets_lookup(Store, pending) of
+ [] ->
+ prepare_pending_coordinators(Coords, IgnoreNew);
+ [Pending] ->
+ case lists:member(Tid, IgnoreNew) of
+ false ->
+ mnesia_checkpoint:tm_enter_pending(Pending);
+ true ->
+ ignore
+ end,
+ prepare_pending_coordinators(Coords, IgnoreNew);
+ {'EXIT', _} ->
+ prepare_pending_coordinators(Coords, IgnoreNew)
+ end;
+prepare_pending_coordinators([], _IgnoreNew) ->
+ ok.
+
+prepare_pending_participants([Part | Parts], IgnoreNew) ->
+ Tid = Part#participant.tid,
+ D = Part#participant.disc_nodes,
+ R = Part#participant.ram_nodes,
+ case lists:member(Tid, IgnoreNew) of
+ false ->
+ mnesia_checkpoint:tm_enter_pending(Tid, D, R);
+ true ->
+ ignore
+ end,
+ prepare_pending_participants(Parts, IgnoreNew);
+prepare_pending_participants([], _IgnoreNew) ->
+ ok.
+
+handle_exit(Pid, Reason, State) when node(Pid) /= node() ->
+ %% We got exit from a remote fool
+ dbg_out("~p got remote EXIT from unknown ~p~n",
+ [?MODULE, {Pid, Reason}]),
+ doit_loop(State);
+
+handle_exit(Pid, _Reason, State) when Pid == State#state.supervisor ->
+ %% Our supervisor has died, time to stop
+ do_stop(State);
+
+handle_exit(Pid, Reason, State) ->
+ %% Check if it is a coordinator
+ case pid_search_delete(Pid, State#state.coordinators) of
+ {none, _} ->
+ %% Check if it is a participant
+ case mnesia_lib:key_search_delete(Pid, #participant.pid, State#state.participants) of
+ {none, _} ->
+ %% We got exit from a local fool
+ verbose("** ERROR ** ~p got local EXIT from unknown process: ~p~n",
+ [?MODULE, {Pid, Reason}]),
+ doit_loop(State);
+
+ {P, RestP} when record(P, participant) ->
+ fatal("Participant ~p in transaction ~p died ~p~n",
+ [P#participant.pid, P#participant.tid, Reason]),
+ doit_loop(State#state{participants = RestP})
+ end;
+
+ {{Tid, Etabs}, RestC} ->
+ %% A local coordinator has died and
+ %% we must determine the outcome of the
+ %% transaction and tell mnesia_tm on the
+ %% other nodes about it and then recover
+ %% locally.
+ recover_coordinator(Tid, Etabs),
+ doit_loop(State#state{coordinators = RestC})
+ end.
+
+recover_coordinator(Tid, Etabs) ->
+ verbose("Coordinator ~p in transaction ~p died.~n", [Tid#tid.pid, Tid]),
+
+ Store = hd(Etabs),
+ CheckNodes = get_nodes(Store),
+ TellNodes = CheckNodes -- [node()],
+ case catch arrange(Tid, Store, async) of
+ {'EXIT', Reason} ->
+            dbg_out("Recovery of coordinator ~p failed: ~p~n", [Tid, Reason]),
+ Protocol = asym_trans,
+ tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes);
+ {_N, Prep} ->
+ %% Tell the participants about the outcome
+ Protocol = Prep#prep.protocol,
+ Outcome = tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes),
+
+ %% Recover locally
+ CR = Prep#prep.records,
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ {value, Local} = lists:keysearch(node(), #commit.node, CR),
+
+ ?eval_debug_fun({?MODULE, recover_coordinator, pre},
+ [{tid, Tid}, {outcome, Outcome}, {prot, Protocol}]),
+ recover_coordinator(Tid, Protocol, Outcome, Local, DiscNs, RamNs),
+ ?eval_debug_fun({?MODULE, recover_coordinator, post},
+ [{tid, Tid}, {outcome, Outcome}, {prot, Protocol}])
+
+ end,
+ erase_ets_tabs(Etabs),
+ transaction_terminated(Tid),
+ mnesia_locker:release_tid(Tid).
+
+recover_coordinator(Tid, sym_trans, committed, Local, _, _) ->
+ mnesia_recover:note_decision(Tid, committed),
+ do_dirty(Tid, Local);
+recover_coordinator(Tid, sym_trans, aborted, _Local, _, _) ->
+ mnesia_recover:note_decision(Tid, aborted);
+recover_coordinator(Tid, sync_sym_trans, committed, Local, _, _) ->
+ mnesia_recover:note_decision(Tid, committed),
+ do_dirty(Tid, Local);
+recover_coordinator(Tid, sync_sym_trans, aborted, _Local, _, _) ->
+ mnesia_recover:note_decision(Tid, aborted);
+
+recover_coordinator(Tid, asym_trans, committed, Local, DiscNs, RamNs) ->
+ D = #decision{tid = Tid, outcome = committed,
+ disc_nodes = DiscNs, ram_nodes = RamNs},
+ mnesia_recover:log_decision(D),
+ do_commit(Tid, Local);
+recover_coordinator(Tid, asym_trans, aborted, Local, DiscNs, RamNs) ->
+ D = #decision{tid = Tid, outcome = aborted,
+ disc_nodes = DiscNs, ram_nodes = RamNs},
+ mnesia_recover:log_decision(D),
+ do_abort(Tid, Local).
+
+restore_stores([{Tid, Etstabs} | Tail], Tid, Store) ->
+ Remaining = lists:delete(Store, Etstabs),
+ erase_ets_tabs(Remaining),
+ [{Tid, [Store]} | Tail];
+restore_stores([H | T], Tid, Store) ->
+ [H | restore_stores(T, Tid, Store)].
+%% No NIL case on purpose
+
+add_coord_store([{Tid, Stores} | Coordinators], Tid, Etab) ->
+ [{Tid, [Etab | Stores]} | Coordinators];
+add_coord_store([H | T], Tid, Etab) ->
+ [H | add_coord_store(T, Tid, Etab)].
+%% no NIL case on purpose
+
+del_coord_store([{Tid, Stores} | Coordinators], Tid, Current, Obsolete) ->
+ Rest =
+ case Stores of
+ [Obsolete, Current | Tail] -> Tail;
+ [Current, Obsolete | Tail] -> Tail
+ end,
+ ?ets_delete_table(Obsolete),
+ [{Tid, [Current | Rest]} | Coordinators];
+del_coord_store([H | T], Tid, Current, Obsolete) ->
+ [H | del_coord_store(T, Tid, Current, Obsolete)].
+%% no NIL case on purpose
+
+erase_ets_tabs([H | T]) ->
+ ?ets_delete_table(H),
+ erase_ets_tabs(T);
+erase_ets_tabs([]) ->
+ ok.
+
+%% Deletes a pid from a list of participants
+%% or from a list of coordinators and returns
+%% {none, All} or {Tr, Rest}
+pid_search_delete(Pid, Trs) ->
+ pid_search_delete(Pid, Trs, none, []).
+pid_search_delete(Pid, [Tr = {Tid, _Ts} | Trs], _Val, Ack) when Tid#tid.pid == Pid ->
+ pid_search_delete(Pid, Trs, Tr, Ack);
+pid_search_delete(Pid, [Tr | Trs], Val, Ack) ->
+ pid_search_delete(Pid, Trs, Val, [Tr | Ack]);
+
+pid_search_delete(_Pid, [], Val, Ack) ->
+ {Val, Ack}.
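+
+%% Illustrative only (never called by mnesia_tm): pid_search_delete/2 yields
+%% {none, Rest} when Pid owns no coordinator entry and {{Tid, Etabs}, Rest}
+%% otherwise, as this small sketch spells out.
+example_pid_search(Pid, Coordinators) ->
+    case pid_search_delete(Pid, Coordinators) of
+        {none, _All} -> not_found;
+        {{Tid, _Etabs}, Rest} -> {found, Tid, Rest}
+    end.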
+
+%% When TM gets an EXIT sig, we must also check to see
+%% if the crashing transaction is in the Participant list
+%%
+%% search_participant_for_pid([Participant | Tail], Pid) ->
+%% Tid = Participant#participant.tid,
+%% if
+%% Tid#tid.pid == Pid ->
+%% {coordinator, Participant};
+%% Participant#participant.pid == Pid ->
+%% {participant, Participant};
+%% true ->
+%% search_participant_for_pid(Tail, Pid)
+%% end;
+%% search_participant_for_pid([], _) ->
+%% fool.
+
+transaction_terminated(Tid) ->
+ mnesia_checkpoint:tm_exit_pending(Tid),
+ Pid = Tid#tid.pid,
+ if
+ node(Pid) == node() ->
+ unlink(Pid);
+ true -> %% Do the Lamport thing here
+ mnesia_recover:sync_trans_tid_serial(Tid)
+ end.
+
+non_transaction(OldState, Fun, Args, ActivityKind, Mod) ->
+ Id = {ActivityKind, self()},
+ NewState = {Mod, Id, non_transaction},
+ put(mnesia_activity_state, NewState),
+    %% I want something unique here, references are expensive
+ Ref = mNeSia_nOn_TrAnSacTioN,
+ RefRes = (catch {Ref, apply(Fun, Args)}),
+ case OldState of
+ undefined -> erase(mnesia_activity_state);
+ _ -> put(mnesia_activity_state, OldState)
+ end,
+ case RefRes of
+ {Ref, Res} ->
+ case Res of
+ {'EXIT', Reason} -> exit(Reason);
+ {aborted, Reason} -> mnesia:abort(Reason);
+ _ -> Res
+ end;
+ {'EXIT', Reason} ->
+ exit(Reason);
+ Throw ->
+ throw(Throw)
+ end.
+
+transaction(OldTidTs, Fun, Args, Retries, Mod, Type) ->
+ Factor = 1,
+ case OldTidTs of
+ undefined -> % Outer
+ execute_outer(Mod, Fun, Args, Factor, Retries, Type);
+ {_OldMod, Tid, Ts} -> % Nested
+ execute_inner(Mod, Tid, Ts, Fun, Args, Factor, Retries, Type);
+ _ -> % Bad nesting
+ {aborted, nested_transaction}
+ end.
+
+execute_outer(Mod, Fun, Args, Factor, Retries, Type) ->
+ case req(start_outer) of
+ {error, Reason} ->
+ {aborted, Reason};
+ {new_tid, Tid, Store} ->
+ Ts = #tidstore{store = Store},
+ NewTidTs = {Mod, Tid, Ts},
+ put(mnesia_activity_state, NewTidTs),
+ execute_transaction(Fun, Args, Factor, Retries, Type)
+ end.
+
+execute_inner(Mod, Tid, Ts, Fun, Args, Factor, Retries, Type) ->
+ case req({add_store, Tid}) of
+ {error, Reason} ->
+ {aborted, Reason};
+ {new_store, Ets} ->
+ copy_ets(Ts#tidstore.store, Ets),
+ Up = [Ts#tidstore.store | Ts#tidstore.up_stores],
+ NewTs = Ts#tidstore{level = 1 + Ts#tidstore.level,
+ store = Ets,
+ up_stores = Up},
+ NewTidTs = {Mod, Tid, NewTs},
+ put(mnesia_activity_state, NewTidTs),
+ execute_transaction(Fun, Args, Factor, Retries, Type)
+ end.
+
+copy_ets(From, To) ->
+ do_copy_ets(?ets_first(From), From, To).
+do_copy_ets('$end_of_table', _,_) ->
+ ok;
+do_copy_ets(K, From, To) ->
+ Objs = ?ets_lookup(From, K),
+ insert_objs(Objs, To),
+ do_copy_ets(?ets_next(From, K), From, To).
+
+insert_objs([H|T], Tab) ->
+ ?ets_insert(Tab, H),
+ insert_objs(T, Tab);
+insert_objs([], _Tab) ->
+ ok.
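+
+%% User-level sketch of the nesting that execute_inner/8 and copy_ets/2
+%% implement: an inner mnesia:transaction/1 started inside an outer one gets
+%% its own ets store and reports back to the outer fun as {atomic, Value} or
+%% {aborted, Reason}. Purely illustrative, not called from this module.
+example_nested_transaction(Fun) ->
+    mnesia:transaction(fun() -> mnesia:transaction(Fun) end).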
+
+execute_transaction(Fun, Args, Factor, Retries, Type) ->
+ case catch apply_fun(Fun, Args, Type) of
+ {'EXIT', Reason} ->
+ check_exit(Fun, Args, Factor, Retries, Reason, Type);
+ {'atomic', Value} ->
+ mnesia_lib:incr_counter(trans_commits),
+ erase(mnesia_activity_state),
+ %% no need to clear locks, already done by commit ...
+            %% Flush any unprocessed mnesia_down messages we might have
+ flush_downs(),
+ {'atomic', Value};
+ {nested_atomic, Value} ->
+ mnesia_lib:incr_counter(trans_commits),
+ {'atomic', Value};
+ Value -> %% User called throw
+ Reason = {aborted, {throw, Value}},
+ return_abort(Fun, Args, Reason)
+ end.
+
+apply_fun(Fun, Args, Type) ->
+ Result = apply(Fun, Args),
+ case t_commit(Type) of
+ do_commit ->
+ {'atomic', Result};
+ do_commit_nested ->
+ {nested_atomic, Result};
+ {do_abort, {aborted, Reason}} ->
+ {'EXIT', {aborted, Reason}};
+ {do_abort, Reason} ->
+ {'EXIT', {aborted, Reason}}
+ end.
+
+check_exit(Fun, Args, Factor, Retries, Reason, Type) ->
+ case Reason of
+ {aborted, C} when record(C, cyclic) ->
+ maybe_restart(Fun, Args, Factor, Retries, Type, C);
+ {aborted, {node_not_running, N}} ->
+ maybe_restart(Fun, Args, Factor, Retries, Type, {node_not_running, N});
+ {aborted, {bad_commit, N}} ->
+ maybe_restart(Fun, Args, Factor, Retries, Type, {bad_commit, N});
+ _ ->
+ return_abort(Fun, Args, Reason)
+ end.
+
+maybe_restart(Fun, Args, Factor, Retries, Type, Why) ->
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ case try_again(Retries) of
+ yes when Ts#tidstore.level == 1 ->
+ restart(Mod, Tid, Ts, Fun, Args, Factor, Retries, Type, Why);
+ yes ->
+ return_abort(Fun, Args, Why);
+ no ->
+ return_abort(Fun, Args, {aborted, nomore})
+ end.
+
+try_again(infinity) -> yes;
+try_again(X) when number(X) , X > 1 -> yes;
+try_again(_) -> no.
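+
+%% A user-level sketch of the retry behaviour governed by try_again/1 and
+%% maybe_restart/6: mnesia:transaction/3 takes an explicit retry count, and
+%% once the retries are exhausted the caller sees an abort. The count of 5
+%% below is only an example; this function is not used by mnesia itself.
+example_bounded_retries(Fun) ->
+    case mnesia:transaction(Fun, [], 5) of
+        {atomic, Value} -> {ok, Value};
+        {aborted, Reason} -> {error, Reason}
+    end.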
+
+%% We can only restart toplevel transactions.
+%% If a deadlock situation occurs in a nested transaction,
+%% the whole thing, including all nested transactions, needs to be
+%% restarted. The stack is thus popped by a consecutive series of
+%% exit({aborted, #cyclic{}}) calls
+
+restart(Mod, Tid, Ts, Fun, Args, Factor0, Retries0, Type, Why) ->
+ mnesia_lib:incr_counter(trans_restarts),
+ Retries = decr(Retries0),
+ case Why of
+ {bad_commit, _N} ->
+ return_abort(Fun, Args, Why),
+ Factor = 1,
+ SleepTime = mnesia_lib:random_time(Factor, Tid#tid.counter),
+ dbg_out("Restarting transaction ~w: in ~wms ~w~n", [Tid, SleepTime, Why]),
+ timer:sleep(SleepTime),
+ execute_outer(Mod, Fun, Args, Factor, Retries, Type);
+ {node_not_running, _N} -> %% Avoids hanging in receive_release_tid_ack
+ return_abort(Fun, Args, Why),
+ Factor = 1,
+ SleepTime = mnesia_lib:random_time(Factor, Tid#tid.counter),
+ dbg_out("Restarting transaction ~w: in ~wms ~w~n", [Tid, SleepTime, Why]),
+ timer:sleep(SleepTime),
+ execute_outer(Mod, Fun, Args, Factor, Retries, Type);
+ _ ->
+ SleepTime = mnesia_lib:random_time(Factor0, Tid#tid.counter),
+ dbg_out("Restarting transaction ~w: in ~wms ~w~n", [Tid, SleepTime, Why]),
+
+ if
+ Factor0 /= 10 ->
+ ignore;
+ true ->
+                    %% Our serial may be much larger than that of the other nodes
+ AllNodes = val({current, db_nodes}),
+ verbose("Sync serial ~p~n", [Tid]),
+ rpc:abcast(AllNodes, ?MODULE, {sync_trans_serial, Tid})
+ end,
+ intercept_friends(Tid, Ts),
+ Store = Ts#tidstore.store,
+ Nodes = get_nodes(Store),
+ ?MODULE ! {self(), {restart, Tid, Store}},
+ mnesia_locker:send_release_tid(Nodes, Tid),
+ timer:sleep(SleepTime),
+ mnesia_locker:receive_release_tid_acc(Nodes, Tid),
+ case rec() of
+ {restarted, Tid} ->
+ execute_transaction(Fun, Args, Factor0 + 1,
+ Retries, Type);
+ {error, Reason} ->
+ mnesia:abort(Reason)
+ end
+ end.
+
+decr(infinity) -> infinity;
+decr(X) when integer(X), X > 1 -> X - 1;
+decr(_X) -> 0.
+
+return_abort(Fun, Args, Reason) ->
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ OldStore = Ts#tidstore.store,
+ Nodes = get_nodes(OldStore),
+ intercept_friends(Tid, Ts),
+ catch mnesia_lib:incr_counter(trans_failures),
+ Level = Ts#tidstore.level,
+ if
+ Level == 1 ->
+ mnesia_locker:async_release_tid(Nodes, Tid),
+ ?MODULE ! {delete_transaction, Tid},
+ erase(mnesia_activity_state),
+ dbg_out("Transaction ~p calling ~p with ~p, failed ~p~n",
+ [Tid, Fun, Args, Reason]),
+ flush_downs(),
+ {aborted, mnesia_lib:fix_error(Reason)};
+ true ->
+ %% Nested transaction
+ [NewStore | Tail] = Ts#tidstore.up_stores,
+ req({del_store, Tid, NewStore, OldStore, true}),
+ Ts2 = Ts#tidstore{store = NewStore,
+ up_stores = Tail,
+ level = Level - 1},
+ NewTidTs = {Mod, Tid, Ts2},
+ put(mnesia_activity_state, NewTidTs),
+ case Reason of
+ #cyclic{} ->
+ exit({aborted, Reason});
+ {node_not_running, _N} ->
+ exit({aborted, Reason});
+ {bad_commit, _N}->
+ exit({aborted, Reason});
+ _ ->
+ {aborted, mnesia_lib:fix_error(Reason)}
+ end
+ end.
+
+flush_downs() ->
+ receive
+ {?MODULE, _, _} -> flush_downs(); % Votes
+ {mnesia_down, _} -> flush_downs()
+ after 0 -> flushed
+ end.
+
+put_activity_id(undefined) ->
+ erase_activity_id();
+put_activity_id({Mod, Tid, Ts}) when record(Tid, tid), record(Ts, tidstore) ->
+ flush_downs(),
+ Store = Ts#tidstore.store,
+ ?ets_insert(Store, {friends, self()}),
+ NewTidTs = {Mod, Tid, Ts},
+ put(mnesia_activity_state, NewTidTs);
+put_activity_id(SimpleState) ->
+ put(mnesia_activity_state, SimpleState).
+
+erase_activity_id() ->
+ flush_downs(),
+ erase(mnesia_activity_state).
+
+get_nodes(Store) ->
+ case catch ?ets_lookup_element(Store, nodes, 2) of
+ {'EXIT', _} -> [node()];
+ Nodes -> Nodes
+ end.
+
+get_friends(Store) ->
+ case catch ?ets_lookup_element(Store, friends, 2) of
+ {'EXIT', _} -> [];
+ Friends -> Friends
+ end.
+
+opt_propagate_store(_Current, _Obsolete, false) ->
+ ok;
+opt_propagate_store(Current, Obsolete, true) ->
+ propagate_store(Current, nodes, get_nodes(Obsolete)),
+ propagate_store(Current, friends, get_friends(Obsolete)).
+
+propagate_store(Store, Var, [Val | Vals]) ->
+ ?ets_insert(Store, {Var, Val}),
+ propagate_store(Store, Var, Vals);
+propagate_store(_Store, _Var, []) ->
+ ok.
+
+%% Tell all processes that are cooperating with the current transaction
+intercept_friends(_Tid, Ts) ->
+ Friends = get_friends(Ts#tidstore.store),
+ Message = {activity_ended, undefined, self()},
+ intercept_best_friend(Friends, Message).
+
+intercept_best_friend([], _Message) ->
+ ok;
+intercept_best_friend([Pid | _], Message) ->
+ Pid ! Message,
+ wait_for_best_friend(Pid, 0).
+
+wait_for_best_friend(Pid, Timeout) ->
+ receive
+ {'EXIT', Pid, _} -> ok;
+ {activity_ended, _, Pid} -> ok
+ after Timeout ->
+ case my_process_is_alive(Pid) of
+ true -> wait_for_best_friend(Pid, 1000);
+ false -> ok
+ end
+ end.
+
+my_process_is_alive(Pid) ->
+ case catch erlang:is_process_alive(Pid) of % New BIF in R5
+ true ->
+ true;
+ false ->
+ false;
+ {'EXIT', _} -> % Pre R5 backward compatibility
+ case process_info(Pid, message_queue_len) of
+ undefined -> false;
+ _ -> true
+ end
+ end.
+
+dirty(Protocol, Item) ->
+ {{Tab, Key}, _Val, _Op} = Item,
+ Tid = {dirty, self()},
+ Prep = prepare_items(Tid, Tab, Key, [Item], #prep{protocol= Protocol}),
+ CR = Prep#prep.records,
+ case Protocol of
+ async_dirty ->
+ %% Send commit records to the other involved nodes,
+            %% but only wait for one node to complete,
+            %% preferably the local node if possible.
+
+ ReadNode = val({Tab, where_to_read}),
+ {WaitFor, FirstRes} = async_send_dirty(Tid, CR, Tab, ReadNode),
+ rec_dirty(WaitFor, FirstRes);
+
+ sync_dirty ->
+ %% Send commit records to the other involved nodes,
+ %% and wait for all nodes to complete
+ {WaitFor, FirstRes} = sync_send_dirty(Tid, CR, Tab, []),
+ rec_dirty(WaitFor, FirstRes);
+ _ ->
+ mnesia:abort({bad_activity, Protocol})
+ end.
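+
+%% A hedged user-level sketch of the two dirty protocols dispatched above:
+%% mnesia:dirty_write/2 roughly corresponds to the async_dirty branch, while
+%% a write wrapped in mnesia:sync_dirty/1 corresponds to the sync_dirty
+%% branch. Table and record come from the caller; not used by mnesia itself.
+example_dirty_protocols(Tab, Record) ->
+    ok = mnesia:dirty_write(Tab, Record),
+    mnesia:sync_dirty(fun() -> mnesia:write(Tab, Record, write) end).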
+
+%% This is the commit function. The first thing it does
+%% is to find out which nodes have been participating
+%% in this particular transaction; all of the mnesia_locker:lock*
+%% functions insert the names of the nodes where they acquire locks
+%% into the local shadow Store.
+%% This function executes in the context of the user process.
+t_commit(Type) ->
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ Store = Ts#tidstore.store,
+ if
+ Ts#tidstore.level == 1 ->
+ intercept_friends(Tid, Ts),
+ %% N is number of updates
+ case arrange(Tid, Store, Type) of
+ {N, Prep} when N > 0 ->
+ multi_commit(Prep#prep.protocol,
+ Tid, Prep#prep.records, Store);
+ {0, Prep} ->
+ multi_commit(read_only, Tid, Prep#prep.records, Store)
+ end;
+ true ->
+ %% nested commit
+ Level = Ts#tidstore.level,
+ [Obsolete | Tail] = Ts#tidstore.up_stores,
+ req({del_store, Tid, Store, Obsolete, false}),
+ NewTs = Ts#tidstore{store = Store,
+ up_stores = Tail,
+ level = Level - 1},
+ NewTidTs = {Mod, Tid, NewTs},
+ put(mnesia_activity_state, NewTidTs),
+ do_commit_nested
+ end.
+
+%% This function arranges for all objects we shall write in S to be
+%% in a list of {Node, CommitRecord}
+%% Important function for the performance of mnesia.
+
+arrange(Tid, Store, Type) ->
+ %% The local node is always included
+ Nodes = get_nodes(Store),
+ Recs = prep_recs(Nodes, []),
+ Key = ?ets_first(Store),
+ N = 0,
+ Prep =
+ case Type of
+ async -> #prep{protocol = sym_trans, records = Recs};
+ sync -> #prep{protocol = sync_sym_trans, records = Recs}
+ end,
+ case catch do_arrange(Tid, Store, Key, Prep, N) of
+ {'EXIT', Reason} ->
+ dbg_out("do_arrange failed ~p ~p~n", [Reason, Tid]),
+ case Reason of
+ {aborted, R} ->
+ mnesia:abort(R);
+ _ ->
+ mnesia:abort(Reason)
+ end;
+ {New, Prepared} ->
+ {New, Prepared#prep{records = reverse(Prepared#prep.records)}}
+ end.
+
+reverse([]) ->
+ [];
+reverse([H|R]) when record(H, commit) ->
+ [
+ H#commit{
+ ram_copies = lists:reverse(H#commit.ram_copies),
+ disc_copies = lists:reverse(H#commit.disc_copies),
+ disc_only_copies = lists:reverse(H#commit.disc_only_copies),
+ snmp = lists:reverse(H#commit.snmp)
+ }
+ | reverse(R)].
+
+prep_recs([N | Nodes], Recs) ->
+ prep_recs(Nodes, [#commit{decision = presume_commit, node = N} | Recs]);
+prep_recs([], Recs) ->
+ Recs.
+
+%% storage_types is a list of {Node, Storage} tuples
+%% where each tuple represents an active replica
+do_arrange(Tid, Store, {Tab, Key}, Prep, N) ->
+ Oid = {Tab, Key},
+ Items = ?ets_lookup(Store, Oid), %% Store is a bag
+ P2 = prepare_items(Tid, Tab, Key, Items, Prep),
+ do_arrange(Tid, Store, ?ets_next(Store, Oid), P2, N + 1);
+do_arrange(Tid, Store, SchemaKey, Prep, N) when SchemaKey == op ->
+ Items = ?ets_lookup(Store, SchemaKey), %% Store is a bag
+ P2 = prepare_schema_items(Tid, Items, Prep),
+ do_arrange(Tid, Store, ?ets_next(Store, SchemaKey), P2, N + 1);
+do_arrange(Tid, Store, RestoreKey, Prep, N) when RestoreKey == restore_op ->
+ [{restore_op, R}] = ?ets_lookup(Store, RestoreKey),
+ Fun = fun({Tab, Key}, CommitRecs, _RecName, Where, Snmp) ->
+ Item = [{{Tab, Key}, {Tab, Key}, delete}],
+ do_prepare_items(Tid, Tab, Key, Where, Snmp, Item, CommitRecs);
+ (BupRec, CommitRecs, RecName, Where, Snmp) ->
+ Tab = element(1, BupRec),
+ Key = element(2, BupRec),
+ Item =
+ if
+ Tab == RecName ->
+ [{{Tab, Key}, BupRec, write}];
+ true ->
+ BupRec2 = setelement(1, BupRec, RecName),
+ [{{Tab, Key}, BupRec2, write}]
+ end,
+ do_prepare_items(Tid, Tab, Key, Where, Snmp, Item, CommitRecs)
+ end,
+ Recs2 = mnesia_schema:arrange_restore(R, Fun, Prep#prep.records),
+ P2 = Prep#prep{protocol = asym_trans, records = Recs2},
+ do_arrange(Tid, Store, ?ets_next(Store, RestoreKey), P2, N + 1);
+do_arrange(_Tid, _Store, '$end_of_table', Prep, N) ->
+ {N, Prep};
+do_arrange(Tid, Store, IgnoredKey, Prep, N) -> %% locks, nodes ... local atoms...
+ do_arrange(Tid, Store, ?ets_next(Store, IgnoredKey), Prep, N).
+
+%% Returns a prep record with all items in reverse order
+prepare_schema_items(Tid, Items, Prep) ->
+ Types = [{N, schema_ops} || N <- val({current, db_nodes})],
+ Recs = prepare_nodes(Tid, Types, Items, Prep#prep.records, schema),
+ Prep#prep{protocol = asym_trans, records = Recs}.
+
+%% Returns a prep record with all items in reverse order
+prepare_items(Tid, Tab, Key, Items, Prep) when Prep#prep.prev_tab == Tab ->
+ Types = Prep#prep.prev_types,
+ Snmp = Prep#prep.prev_snmp,
+ Recs = Prep#prep.records,
+ Recs2 = do_prepare_items(Tid, Tab, Key, Types, Snmp, Items, Recs),
+ Prep#prep{records = Recs2};
+
+prepare_items(Tid, Tab, Key, Items, Prep) ->
+ Types = val({Tab, where_to_commit}),
+ case Types of
+ [] -> mnesia:abort({no_exists, Tab});
+ {blocked, _} ->
+ unblocked = req({unblock_me, Tab}),
+ prepare_items(Tid, Tab, Key, Items, Prep);
+ _ ->
+ Snmp = val({Tab, snmp}),
+ Recs2 = do_prepare_items(Tid, Tab, Key, Types,
+ Snmp, Items, Prep#prep.records),
+ Prep2 = Prep#prep{records = Recs2, prev_tab = Tab,
+ prev_types = Types, prev_snmp = Snmp},
+ check_prep(Prep2, Types)
+ end.
+
+do_prepare_items(Tid, Tab, Key, Types, Snmp, Items, Recs) ->
+ Recs2 = prepare_snmp(Tid, Tab, Key, Types, Snmp, Items, Recs), % May exit
+ prepare_nodes(Tid, Types, Items, Recs2, normal).
+
+prepare_snmp(Tab, Key, Items) ->
+ case val({Tab, snmp}) of
+ [] ->
+ [];
+ Ustruct when Key /= '_' ->
+ {_Oid, _Val, Op} = hd(Items),
+            %% We still build the snmp oid (even though it is not used) because we
+            %% want to catch errors here, and it keeps backwards compatibility with old nodes.
+ SnmpOid = mnesia_snmp_hook:key_to_oid(Tab, Key, Ustruct), % May exit
+ [{Op, Tab, Key, SnmpOid}];
+ _ ->
+ [{clear_table, Tab}]
+ end.
+
+prepare_snmp(_Tid, _Tab, _Key, _Types, [], _Items, Recs) ->
+ Recs;
+
+prepare_snmp(Tid, Tab, Key, Types, Us, Items, Recs) ->
+ if Key /= '_' ->
+ {_Oid, _Val, Op} = hd(Items),
+ SnmpOid = mnesia_snmp_hook:key_to_oid(Tab, Key, Us), % May exit
+ prepare_nodes(Tid, Types, [{Op, Tab, Key, SnmpOid}], Recs, snmp);
+ Key == '_' ->
+ prepare_nodes(Tid, Types, [{clear_table, Tab}], Recs, snmp)
+ end.
+
+check_prep(Prep, Types) when Prep#prep.types == Types ->
+ Prep;
+check_prep(Prep, Types) when Prep#prep.types == undefined ->
+ Prep#prep{types = Types};
+check_prep(Prep, _Types) ->
+ Prep#prep{protocol = asym_trans}.
+
+%% Returns a list of commit records
+prepare_nodes(Tid, [{Node, Storage} | Rest], Items, C, Kind) ->
+ {Rec, C2} = pick_node(Tid, Node, C, []),
+ Rec2 = prepare_node(Node, Storage, Items, Rec, Kind),
+ [Rec2 | prepare_nodes(Tid, Rest, Items, C2, Kind)];
+prepare_nodes(_Tid, [], _Items, CommitRecords, _Kind) ->
+ CommitRecords.
+
+pick_node(Tid, Node, [Rec | Rest], Done) ->
+ if
+ Rec#commit.node == Node ->
+ {Rec, Done ++ Rest};
+ true ->
+ pick_node(Tid, Node, Rest, [Rec | Done])
+ end;
+pick_node(_Tid, Node, [], Done) ->
+ {#commit{decision = presume_commit, node = Node}, Done}.
+
+prepare_node(Node, Storage, [Item | Items], Rec, Kind) when Kind == snmp ->
+ Rec2 = Rec#commit{snmp = [Item | Rec#commit.snmp]},
+ prepare_node(Node, Storage, Items, Rec2, Kind);
+prepare_node(Node, Storage, [Item | Items], Rec, Kind) when Kind /= schema ->
+ Rec2 =
+ case Storage of
+ ram_copies ->
+ Rec#commit{ram_copies = [Item | Rec#commit.ram_copies]};
+ disc_copies ->
+ Rec#commit{disc_copies = [Item | Rec#commit.disc_copies]};
+ disc_only_copies ->
+ Rec#commit{disc_only_copies =
+ [Item | Rec#commit.disc_only_copies]}
+ end,
+ prepare_node(Node, Storage, Items, Rec2, Kind);
+prepare_node(_Node, _Storage, Items, Rec, Kind)
+ when Kind == schema, Rec#commit.schema_ops == [] ->
+ Rec#commit{schema_ops = Items};
+prepare_node(_Node, _Storage, [], Rec, _Kind) ->
+ Rec.
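+
+%% The multi_commit/4 clauses below describe mnesia's commit protocols in
+%% prose. As a reading aid only, here is a toy sketch (never called by
+%% mnesia) of the voting rule those descriptions share: commit only if every
+%% participant voted yes, otherwise abort with the first reported reason.
+example_tally_votes([]) ->
+    do_commit;
+example_tally_votes([yes | Votes]) ->
+    example_tally_votes(Votes);
+example_tally_votes([{no, Reason} | _Votes]) ->
+    {do_abort, Reason}.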
+
+%% multi_commit(Protocol, Tid, CommitRecords, Store)
+%% Local work is always performed in users process
+multi_commit(read_only, Tid, CR, _Store) ->
+ %% This featherweight commit protocol is used when no
+ %% updates has been performed in the transaction.
+    %% updates have been performed in the transaction.
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ Msg = {Tid, simple_commit},
+ rpc:abcast(DiscNs -- [node()], ?MODULE, Msg),
+ rpc:abcast(RamNs -- [node()], ?MODULE, Msg),
+ mnesia_recover:note_decision(Tid, committed),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid},
+ do_commit;
+
+multi_commit(sym_trans, Tid, CR, Store) ->
+ %% This lightweight commit protocol is used when all
+    %% the involved tables are replicated symmetrically.
+ %% Their storage types must match on each node.
+ %%
+ %% 1 Ask the other involved nodes if they want to commit
+    %%    All involved nodes vote yes if they are up
+ %% 2a Somebody has voted no
+ %% Tell all yes voters to do_abort
+ %% 2b Everybody has voted yes
+ %% Tell everybody to do_commit. I.e. that they should
+ %% prepare the commit, log the commit record and
+ %% perform the updates.
+ %%
+ %% The outcome is kept 3 minutes in the transient decision table.
+ %%
+ %% Recovery:
+ %% If somebody dies before the coordinator has
+ %% broadcasted do_commit, the transaction is aborted.
+ %%
+ %% If a participant dies, the table load algorithm
+ %% ensures that the contents of the involved tables
+ %% are picked from another node.
+ %%
+ %% If the coordinator dies, each participants checks
+ %% the outcome with all the others. If all are uncertain
+ %% about the outcome, the transaction is aborted. If
+ %% somebody knows the outcome the others will follow.
+
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ ?ets_insert(Store, Pending),
+
+ {WaitFor, Local} = ask_commit(sym_trans, Tid, CR, DiscNs, RamNs),
+ {Outcome, []} = rec_all(WaitFor, Tid, do_commit, []),
+ ?eval_debug_fun({?MODULE, multi_commit_sym},
+ [{tid, Tid}, {outcome, Outcome}]),
+ rpc:abcast(DiscNs -- [node()], ?MODULE, {Tid, Outcome}),
+ rpc:abcast(RamNs -- [node()], ?MODULE, {Tid, Outcome}),
+ case Outcome of
+ do_commit ->
+ mnesia_recover:note_decision(Tid, committed),
+ do_dirty(Tid, Local),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid};
+ {do_abort, _Reason} ->
+ mnesia_recover:note_decision(Tid, aborted)
+ end,
+ ?eval_debug_fun({?MODULE, multi_commit_sym, post},
+ [{tid, Tid}, {outcome, Outcome}]),
+ Outcome;
+
+multi_commit(sync_sym_trans, Tid, CR, Store) ->
+ %% This protocol is the same as sym_trans except that it
+ %% uses synchronized calls to disk_log and synchronized commits
+ %% when several nodes are involved.
+
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ ?ets_insert(Store, Pending),
+
+ {WaitFor, Local} = ask_commit(sync_sym_trans, Tid, CR, DiscNs, RamNs),
+ {Outcome, []} = rec_all(WaitFor, Tid, do_commit, []),
+ ?eval_debug_fun({?MODULE, multi_commit_sym_sync},
+ [{tid, Tid}, {outcome, Outcome}]),
+ rpc:abcast(DiscNs -- [node()], ?MODULE, {Tid, Outcome}),
+ rpc:abcast(RamNs -- [node()], ?MODULE, {Tid, Outcome}),
+ case Outcome of
+ do_commit ->
+ mnesia_recover:note_decision(Tid, committed),
+ mnesia_log:slog(Local),
+ do_commit(Tid, Local),
+ %% Just wait for completion; the result is ignored.
+ rec_all(WaitFor, Tid, ignore, []),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid};
+ {do_abort, _Reason} ->
+ mnesia_recover:note_decision(Tid, aborted)
+ end,
+ ?eval_debug_fun({?MODULE, multi_commit_sym, post},
+ [{tid, Tid}, {outcome, Outcome}]),
+ Outcome;
+
+multi_commit(asym_trans, Tid, CR, Store) ->
+ %% This more expensive commit protocol is used when
+ %% table definitions are changed (schema transactions).
+ %% It is also used when the involved tables are
+ %% replicated asymmetrically. If the storage type differs
+ %% on at least one node this protocol is used.
+ %%
+ %% 1 Ask the other involved nodes if they want to commit.
+ %% All involved nodes prepare the commit, log a presume_abort
+ %% commit record and vote yes or no depending on the
+ %% outcome of the prepare. The preparation is also performed
+ %% by the coordinator.
+ %%
+ %% 2a Somebody has died or voted no
+ %% Tell all yes voters to do_abort
+ %% 2b Everybody has voted yes
+ %% Put an unclear marker in the log.
+ %% Tell the others to pre_commit. I.e. that they should
+ %% put an unclear marker in the log and reply
+ %% acc_pre_commit when they are done.
+ %%
+ %% 3a Somebody died
+ %% Tell the remaining participants to do_abort
+ %% 3b Everybody has replied acc_pre_commit
+ %% Tell everybody committed. I.e. that they should
+ %% put a committed marker in the log, perform the updates
+ %% and reply done_commit when they are done. The coordinator
+ %% must wait to put its committed marker into the log
+ %% until the committed message has been sent to all the others.
+ %% Then it performs the local commit before collecting replies.
+ %%
+ %% 4 Everybody has either died or replied done_commit
+ %% Return to the caller.
+ %%
+ %% Recovery:
+ %% If the coordinator dies, the participants (and
+ %% the coordinator when it starts again) must do
+ %% the following:
+ %%
+ %% If we have no unclear marker in the log we may
+ %% safely abort, since we know that nobody may have
+ %% decided to commit yet.
+ %%
+ %% If we have a committed marker in the log we may
+ %% safely commit since we know that everybody else
+ %% also will come to this conclusion.
+ %%
+ %% If we have an unclear marker but no committed marker
+ %% in the log, we are uncertain about the real outcome
+ %% of the transaction and must ask the others before
+ %% we can decide what to do. If someone knows the
+ %% outcome we will do the same. If nobody knows, we
+ %% will wait for the remaining involved nodes to come
+ %% up. When all involved nodes are up and uncertain,
+ %% we decide to commit (first put a committed marker
+ %% in the log, then do the updates).
+
+ D = #decision{tid = Tid, outcome = presume_abort},
+ {D2, CR2} = commit_decision(D, CR, [], []),
+ DiscNs = D2#decision.disc_nodes,
+ RamNs = D2#decision.ram_nodes,
+ Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ ?ets_insert(Store, Pending),
+ {WaitFor, Local} = ask_commit(asym_trans, Tid, CR2, DiscNs, RamNs),
+ SchemaPrep = (catch mnesia_schema:prepare_commit(Tid, Local, {coord, WaitFor})),
+ {Votes, Pids} = rec_all(WaitFor, Tid, do_commit, []),
+
+ ?eval_debug_fun({?MODULE, multi_commit_asym_got_votes},
+ [{tid, Tid}, {votes, Votes}]),
+ case Votes of
+ do_commit ->
+ case SchemaPrep of
+ {_Modified, C, DumperMode} when record(C, commit) ->
+ mnesia_log:log(C), % C is not a binary
+ ?eval_debug_fun({?MODULE, multi_commit_asym_log_commit_rec},
+ [{tid, Tid}]),
+
+ D3 = C#commit.decision,
+ D4 = D3#decision{outcome = unclear},
+ mnesia_recover:log_decision(D4),
+ ?eval_debug_fun({?MODULE, multi_commit_asym_log_commit_dec},
+ [{tid, Tid}]),
+ tell_participants(Pids, {Tid, pre_commit}),
+ %% Now we are uncertain and we do not know
+ %% if all participants have logged that
+ %% they are uncertain or not
+ rec_acc_pre_commit(Pids, Tid, Store, C,
+ do_commit, DumperMode, [], []);
+ {'EXIT', Reason} ->
+ %% The others have logged the commit
+ %% record but they are not uncertain
+ mnesia_recover:note_decision(Tid, aborted),
+ ?eval_debug_fun({?MODULE, multi_commit_asym_prepare_exit},
+ [{tid, Tid}]),
+ tell_participants(Pids, {Tid, {do_abort, Reason}}),
+ do_abort(Tid, Local),
+ {do_abort, Reason}
+ end;
+
+ {do_abort, Reason} ->
+ %% The others have logged the commit
+ %% record but they are not uncertain
+ mnesia_recover:note_decision(Tid, aborted),
+ ?eval_debug_fun({?MODULE, multi_commit_asym_do_abort}, [{tid, Tid}]),
+ tell_participants(Pids, {Tid, {do_abort, Reason}}),
+ do_abort(Tid, Local),
+ {do_abort, Reason}
+ end.
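+
+%% Illustrative sketch only (hypothetical name and Votes shape, not part
+%% of this module): the decision rule applied to the votes collected by
+%% rec_all/4 below can be summarised as "any no vote aborts, otherwise
+%% commit".
+sketch_outcome(Votes) ->
+ case lists:keyfind(vote_no, 1, Votes) of
+ {vote_no, Reason} -> {do_abort, Reason};
+ false -> do_commit
+ end.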
+
+%% Returns do_commit or {do_abort, Reason}
+rec_acc_pre_commit([Pid | Tail], Tid, Store, Commit, Res, DumperMode,
+ GoodPids, SchemaAckPids) ->
+ receive
+ {?MODULE, _, {acc_pre_commit, Tid, Pid, true}} ->
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, Res, DumperMode,
+ [Pid | GoodPids], [Pid | SchemaAckPids]);
+
+ {?MODULE, _, {acc_pre_commit, Tid, Pid, false}} ->
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, Res, DumperMode,
+ [Pid | GoodPids], SchemaAckPids);
+
+ {?MODULE, _, {acc_pre_commit, Tid, Pid}} ->
+ %% Kept for backwards compatibility. Remove after Mnesia 4.x
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, Res, DumperMode,
+ [Pid | GoodPids], [Pid | SchemaAckPids]);
+
+ {mnesia_down, Node} when Node == node(Pid) ->
+ AbortRes = {do_abort, {bad_commit, Node}},
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, AbortRes, DumperMode,
+ GoodPids, SchemaAckPids)
+ end;
+rec_acc_pre_commit([], Tid, Store, Commit, Res, DumperMode, GoodPids, SchemaAckPids) ->
+ D = Commit#commit.decision,
+ case Res of
+ do_commit ->
+ %% Now everybody knows that the others
+ %% have voted yes. We also know that
+ %% everybody is uncertain.
+ prepare_sync_schema_commit(Store, SchemaAckPids),
+ tell_participants(GoodPids, {Tid, committed}),
+ D2 = D#decision{outcome = committed},
+ mnesia_recover:log_decision(D2),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_log_commit},
+ [{tid, Tid}]),
+
+ %% Now we have safely logged committed
+ %% and we can recover without asking others
+ do_commit(Tid, Commit, DumperMode),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_done_commit},
+ [{tid, Tid}]),
+ sync_schema_commit(Tid, Store, SchemaAckPids),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid};
+
+ {do_abort, Reason} ->
+ tell_participants(GoodPids, {Tid, {do_abort, Reason}}),
+ D2 = D#decision{outcome = aborted},
+ mnesia_recover:log_decision(D2),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_log_abort},
+ [{tid, Tid}]),
+ do_abort(Tid, Commit),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_done_abort},
+ [{tid, Tid}])
+ end,
+ Res.
+
+%% Note all nodes so that mnesia_down events can be handled
+prepare_sync_schema_commit(_Store, []) ->
+ ok;
+prepare_sync_schema_commit(Store, [Pid | Pids]) ->
+ ?ets_insert(Store, {waiting_for_commit_ack, node(Pid)}),
+ prepare_sync_schema_commit(Store, Pids).
+
+sync_schema_commit(_Tid, _Store, []) ->
+ ok;
+sync_schema_commit(Tid, Store, [Pid | Tail]) ->
+ receive
+ {?MODULE, _, {schema_commit, Tid, Pid}} ->
+ ?ets_match_delete(Store, {waiting_for_commit_ack, node(Pid)}),
+ sync_schema_commit(Tid, Store, Tail);
+
+ {mnesia_down, Node} when Node == node(Pid) ->
+ ?ets_match_delete(Store, {waiting_for_commit_ack, Node}),
+ sync_schema_commit(Tid, Store, Tail)
+ end.
+
+tell_participants([Pid | Pids], Msg) ->
+ Pid ! Msg,
+ tell_participants(Pids, Msg);
+tell_participants([], _Msg) ->
+ ok.
+
+%% No need for trapping exits. We are only linked
+%% to mnesia_tm and if it dies we should also die.
+%% The same goes for disk_log and dets.
+commit_participant(Coord, Tid, Bin, DiscNs, RamNs) when binary(Bin) ->
+ Commit = binary_to_term(Bin),
+ commit_participant(Coord, Tid, Bin, Commit, DiscNs, RamNs);
+commit_participant(Coord, Tid, C, DiscNs, RamNs) when record(C, commit) ->
+ commit_participant(Coord, Tid, C, C, DiscNs, RamNs).
+
+commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
+ ?eval_debug_fun({?MODULE, commit_participant, pre}, [{tid, Tid}]),
+ case catch mnesia_schema:prepare_commit(Tid, C0, {part, Coord}) of
+ {Modified, C, DumperMode} when record(C, commit) ->
+ %% If we cannot find any local unclear decision,
+ %% we should presume abort during startup recovery
+ case lists:member(node(), DiscNs) of
+ false ->
+ ignore;
+ true ->
+ case Modified of
+ false -> mnesia_log:log(Bin);
+ true -> mnesia_log:log(C)
+ end
+ end,
+ ?eval_debug_fun({?MODULE, commit_participant, vote_yes},
+ [{tid, Tid}]),
+ reply(Coord, {vote_yes, Tid, self()}),
+
+ receive
+ {Tid, pre_commit} ->
+ D = C#commit.decision,
+ mnesia_recover:log_decision(D#decision{outcome = unclear}),
+ ?eval_debug_fun({?MODULE, commit_participant, pre_commit},
+ [{tid, Tid}]),
+ Expect_schema_ack = C#commit.schema_ops /= [],
+ reply(Coord, {acc_pre_commit, Tid, self(), Expect_schema_ack}),
+
+ %% Now we are vulnerable to failures, since
+ %% we cannot decide without asking others
+ receive
+ {Tid, committed} ->
+ mnesia_recover:log_decision(D#decision{outcome = committed}),
+ ?eval_debug_fun({?MODULE, commit_participant, log_commit},
+ [{tid, Tid}]),
+ do_commit(Tid, C, DumperMode),
+ case Expect_schema_ack of
+ false -> ignore;
+ true -> reply(Coord, {schema_commit, Tid, self()})
+ end,
+ ?eval_debug_fun({?MODULE, commit_participant, do_commit},
+ [{tid, Tid}]);
+
+ {Tid, {do_abort, _Reason}} ->
+ mnesia_recover:log_decision(D#decision{outcome = aborted}),
+ ?eval_debug_fun({?MODULE, commit_participant, log_abort},
+ [{tid, Tid}]),
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, undo_prepare},
+ [{tid, Tid}]);
+
+ {'EXIT', _, _} ->
+ mnesia_recover:log_decision(D#decision{outcome = aborted}),
+ ?eval_debug_fun({?MODULE, commit_participant, exit_log_abort},
+ [{tid, Tid}]),
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, exit_undo_prepare},
+ [{tid, Tid}]);
+
+ Msg ->
+ verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
+ [Tid, Msg])
+ end;
+ {Tid, {do_abort, _Reason}} ->
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, pre_commit_undo_prepare},
+ [{tid, Tid}]);
+
+ {'EXIT', _, _} ->
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, pre_commit_undo_prepare}, [{tid, Tid}]);
+
+ Msg ->
+ verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
+ [Tid, Msg])
+ end;
+
+ {'EXIT', Reason} ->
+ ?eval_debug_fun({?MODULE, commit_participant, vote_no},
+ [{tid, Tid}]),
+ reply(Coord, {vote_no, Tid, Reason}),
+ mnesia_schema:undo_prepare_commit(Tid, C0)
+ end,
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid},
+ unlink(whereis(?MODULE)),
+ exit(normal).
+
+do_abort(Tid, Bin) when binary(Bin) ->
+ %% Possible optimization:
+ %% If we wanted, we could pass around a flag
+ %% that tells us whether the binary contains
+ %% schema ops or not. Only if the binary
+ %% contains schema ops is it meaningful to
+ %% unpack the binary and perform
+ %% mnesia_schema:undo_prepare_commit/1.
+ %% (An illustrative sketch follows after do_abort/2.)
+ do_abort(Tid, binary_to_term(Bin));
+do_abort(Tid, Commit) ->
+ mnesia_schema:undo_prepare_commit(Tid, Commit),
+ Commit.
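+
+%% Illustrative sketch of the optimization noted in do_abort/2 above
+%% (the HasSchemaOps flag and the name sketch_do_abort/3 are hypothetical,
+%% not part of this module): only unpack the binary when it carries
+%% schema ops that need to be undone.
+sketch_do_abort(_Tid, Bin, false) when is_binary(Bin) ->
+ %% No schema ops: nothing to undo, so the binary is left untouched.
+ ignore;
+sketch_do_abort(Tid, Bin, true) when is_binary(Bin) ->
+ do_abort(Tid, binary_to_term(Bin)).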
+
+do_dirty(Tid, Commit) when Commit#commit.schema_ops == [] ->
+ mnesia_log:log(Commit),
+ do_commit(Tid, Commit).
+
+%% do_commit(Tid, CommitRecord)
+do_commit(Tid, Bin) when binary(Bin) ->
+ do_commit(Tid, binary_to_term(Bin));
+do_commit(Tid, C) ->
+ do_commit(Tid, C, optional).
+do_commit(Tid, Bin, DumperMode) when binary(Bin) ->
+ do_commit(Tid, binary_to_term(Bin), DumperMode);
+do_commit(Tid, C, DumperMode) ->
+ mnesia_dumper:update(Tid, C#commit.schema_ops, DumperMode),
+ R = do_snmp(Tid, C#commit.snmp),
+ R2 = do_update(Tid, ram_copies, C#commit.ram_copies, R),
+ R3 = do_update(Tid, disc_copies, C#commit.disc_copies, R2),
+ do_update(Tid, disc_only_copies, C#commit.disc_only_copies, R3).
+
+%% Update the items
+do_update(Tid, Storage, [Op | Ops], OldRes) ->
+ case catch do_update_op(Tid, Storage, Op) of
+ ok ->
+ do_update(Tid, Storage, Ops, OldRes);
+ {'EXIT', Reason} ->
+ %% This may only happen when we have recently
+ %% deleted our local replica, changed the storage_type,
+ %% or transformed the table
+ %% BUGBUG: Updates may be lost if storage_type is changed.
+ %% Determine actual storage type and try again.
+ %% BUGBUG: Updates may be lost if table is transformed.
+
+ verbose("do_update in ~w failed: ~p -> {'EXIT', ~p}~n",
+ [Tid, Op, Reason]),
+ do_update(Tid, Storage, Ops, OldRes);
+ NewRes ->
+ do_update(Tid, Storage, Ops, NewRes)
+ end;
+do_update(_Tid, _Storage, [], Res) ->
+ Res.
+
+do_update_op(Tid, Storage, {{Tab, K}, Obj, write}) ->
+ commit_write(?catch_val({Tab, commit_work}), Tid,
+ Tab, K, Obj, undefined),
+ mnesia_lib:db_put(Storage, Tab, Obj);
+
+do_update_op(Tid, Storage, {{Tab, K}, Val, delete}) ->
+ commit_delete(?catch_val({Tab, commit_work}), Tid, Tab, K, Val, undefined),
+ mnesia_lib:db_erase(Storage, Tab, K);
+
+do_update_op(Tid, Storage, {{Tab, K}, {RecName, Incr}, update_counter}) ->
+ {NewObj, OldObjs} =
+ case catch mnesia_lib:db_update_counter(Storage, Tab, K, Incr) of
+ NewVal when integer(NewVal), NewVal >= 0 ->
+ {{RecName, K, NewVal}, [{RecName, K, NewVal - Incr}]};
+ _ ->
+ Zero = {RecName, K, 0},
+ mnesia_lib:db_put(Storage, Tab, Zero),
+ {Zero, []}
+ end,
+ commit_update(?catch_val({Tab, commit_work}), Tid, Tab,
+ K, NewObj, OldObjs),
+ element(3, NewObj);
+
+do_update_op(Tid, Storage, {{Tab, Key}, Obj, delete_object}) ->
+ commit_del_object(?catch_val({Tab, commit_work}),
+ Tid, Tab, Key, Obj, undefined),
+ mnesia_lib:db_match_erase(Storage, Tab, Obj);
+
+do_update_op(Tid, Storage, {{Tab, Key}, Obj, clear_table}) ->
+ commit_clear(?catch_val({Tab, commit_work}), Tid, Tab, Key, Obj),
+ mnesia_lib:db_match_erase(Storage, Tab, Obj).
+
+commit_write([], _, _, _, _, _) -> ok;
+commit_write([{checkpoints, CpList}|R], Tid, Tab, K, Obj, Old) ->
+ mnesia_checkpoint:tm_retain(Tid, Tab, K, write, CpList),
+ commit_write(R, Tid, Tab, K, Obj, Old);
+commit_write([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, write, Old),
+ commit_write(R, Tid, Tab, K, Obj, Old);
+commit_write([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:add_index(H, Tab, K, Obj, Old),
+ commit_write(R, Tid, Tab, K, Obj, Old).
+
+commit_update([], _, _, _, _, _) -> ok;
+commit_update([{checkpoints, CpList}|R], Tid, Tab, K, Obj, _) ->
+ Old = mnesia_checkpoint:tm_retain(Tid, Tab, K, write, CpList),
+ commit_update(R, Tid, Tab, K, Obj, Old);
+commit_update([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, write, Old),
+ commit_update(R, Tid, Tab, K, Obj, Old);
+commit_update([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:add_index(H, Tab, K, Obj, Old),
+ commit_update(R, Tid, Tab, K, Obj, Old).
+
+commit_delete([], _, _, _, _, _) -> ok;
+commit_delete([{checkpoints, CpList}|R], Tid, Tab, K, Obj, _) ->
+ Old = mnesia_checkpoint:tm_retain(Tid, Tab, K, delete, CpList),
+ commit_delete(R, Tid, Tab, K, Obj, Old);
+commit_delete([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, delete, Old),
+ commit_delete(R, Tid, Tab, K, Obj, Old);
+commit_delete([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:delete_index(H, Tab, K),
+ commit_delete(R, Tid, Tab, K, Obj, Old).
+
+commit_del_object([], _, _, _, _, _) -> ok;
+commit_del_object([{checkpoints, CpList}|R], Tid, Tab, K, Obj, _) ->
+ Old = mnesia_checkpoint:tm_retain(Tid, Tab, K, delete_object, CpList),
+ commit_del_object(R, Tid, Tab, K, Obj, Old);
+commit_del_object([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, delete_object, Old),
+ commit_del_object(R, Tid, Tab, K, Obj, Old);
+commit_del_object([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:del_object_index(H, Tab, K, Obj, Old),
+ commit_del_object(R, Tid, Tab, K, Obj, Old).
+
+commit_clear([], _, _, _, _) -> ok;
+commit_clear([{checkpoints, CpList}|R], Tid, Tab, K, Obj) ->
+ mnesia_checkpoint:tm_retain(Tid, Tab, K, clear_table, CpList),
+ commit_clear(R, Tid, Tab, K, Obj);
+commit_clear([H|R], Tid, Tab, K, Obj)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, clear_table, undefined),
+ commit_clear(R, Tid, Tab, K, Obj);
+commit_clear([H|R], Tid, Tab, K, Obj)
+ when element(1, H) == index ->
+ mnesia_index:clear_index(H, Tab, K, Obj),
+ commit_clear(R, Tid, Tab, K, Obj).
+
+do_snmp(_, []) -> ok;
+do_snmp(Tid, [Head | Tail]) ->
+ case catch mnesia_snmp_hook:update(Head) of
+ {'EXIT', Reason} ->
+ %% This should only happen when we have recently
+ %% deleted our local replica or recently detached
+ %% the snmp table
+
+ verbose("do_snmp in ~w failed: ~p -> {'EXIT', ~p}~n",
+ [Tid, Head, Reason]);
+ ok ->
+ ignore
+ end,
+ do_snmp(Tid, Tail).
+
+commit_nodes([C | Tail], AccD, AccR)
+ when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [],
+ C#commit.schema_ops == [] ->
+ commit_nodes(Tail, AccD, [C#commit.node | AccR]);
+commit_nodes([C | Tail], AccD, AccR) ->
+ commit_nodes(Tail, [C#commit.node | AccD], AccR);
+commit_nodes([], AccD, AccR) ->
+ {AccD, AccR}.
+
+commit_decision(D, [C | Tail], AccD, AccR) ->
+ N = C#commit.node,
+ {D2, Tail2} =
+ case C#commit.schema_ops of
+ [] when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [] ->
+ commit_decision(D, Tail, AccD, [N | AccR]);
+ [] ->
+ commit_decision(D, Tail, [N | AccD], AccR);
+ Ops ->
+ case ram_only_ops(N, Ops) of
+ true ->
+ commit_decision(D, Tail, AccD, [N | AccR]);
+ false ->
+ commit_decision(D, Tail, [N | AccD], AccR)
+ end
+ end,
+ {D2, [C#commit{decision = D2} | Tail2]};
+commit_decision(D, [], AccD, AccR) ->
+ {D#decision{disc_nodes = AccD, ram_nodes = AccR}, []}.
+
+ram_only_ops(N, [{op, change_table_copy_type, N, _FromS, _ToS, Cs} | _Ops ]) ->
+ case lists:member({name, schema}, Cs) of
+ true ->
+ %% We always use disc if the storage type of the schema is changed
+ false;
+ false ->
+ not lists:member(N, val({schema, disc_copies}))
+ end;
+
+ram_only_ops(N, _Ops) ->
+ not lists:member(N, val({schema, disc_copies})).
+
+%% Returns {WaitFor, Res}
+sync_send_dirty(Tid, [Head | Tail], Tab, WaitFor) ->
+ Node = Head#commit.node,
+ if
+ Node == node() ->
+ {WF, _} = sync_send_dirty(Tid, Tail, Tab, WaitFor),
+ Res = do_dirty(Tid, Head),
+ {WF, Res};
+ true ->
+ {?MODULE, Node} ! {self(), {sync_dirty, Tid, Head, Tab}},
+ sync_send_dirty(Tid, Tail, Tab, [Node | WaitFor])
+ end;
+sync_send_dirty(_Tid, [], _Tab, WaitFor) ->
+ {WaitFor, {'EXIT', {aborted, {node_not_running, WaitFor}}}}.
+
+%% Returns {WaitFor, Res}
+async_send_dirty(_Tid, _Nodes, Tab, nowhere) ->
+ {[], {'EXIT', {aborted, {no_exists, Tab}}}};
+async_send_dirty(Tid, Nodes, Tab, ReadNode) ->
+ async_send_dirty(Tid, Nodes, Tab, ReadNode, [], ok).
+
+async_send_dirty(Tid, [Head | Tail], Tab, ReadNode, WaitFor, Res) ->
+ Node = Head#commit.node,
+ if
+ ReadNode == Node, Node == node() ->
+ NewRes = do_dirty(Tid, Head),
+ async_send_dirty(Tid, Tail, Tab, ReadNode, WaitFor, NewRes);
+ ReadNode == Node ->
+ {?MODULE, Node} ! {self(), {sync_dirty, Tid, Head, Tab}},
+ NewRes = {'EXIT', {aborted, {node_not_running, Node}}},
+ async_send_dirty(Tid, Tail, Tab, ReadNode, [Node | WaitFor], NewRes);
+ true ->
+ {?MODULE, Node} ! {self(), {async_dirty, Tid, Head, Tab}},
+ async_send_dirty(Tid, Tail, Tab, ReadNode, WaitFor, Res)
+ end;
+async_send_dirty(_Tid, [], _Tab, _ReadNode, WaitFor, Res) ->
+ {WaitFor, Res}.
+
+rec_dirty([Node | Tail], Res) when Node /= node() ->
+ NewRes = get_dirty_reply(Node, Res),
+ rec_dirty(Tail, NewRes);
+rec_dirty([], Res) ->
+ Res.
+
+get_dirty_reply(Node, Res) ->
+ receive
+ {?MODULE, Node, {'EXIT', Reason}} ->
+ {'EXIT', {aborted, {badarg, Reason}}};
+ {?MODULE, Node, {dirty_res, ok}} ->
+ case Res of
+ {'EXIT', {aborted, {node_not_running, _Node}}} ->
+ ok;
+ _ ->
+ %% Prioritize bad results, but let ok override node_not_running
+ Res
+ end;
+ {?MODULE, Node, {dirty_res, Reply}} ->
+ Reply;
+ {mnesia_down, Node} ->
+ %% It's ok to ignore mnesia_down's
+ %% since we will make the replicas
+ %% consistent again when Node is started
+ Res
+ after 1000 ->
+ case lists:member(Node, val({current, db_nodes})) of
+ true ->
+ get_dirty_reply(Node, Res);
+ false ->
+ Res
+ end
+ end.
+
+%% Assume that CommitRecord is not a binary
+%% Return {Res, Pids}
+ask_commit(Protocol, Tid, CR, DiscNs, RamNs) ->
+ ask_commit(Protocol, Tid, CR, DiscNs, RamNs, [], no_local).
+
+ask_commit(Protocol, Tid, [Head | Tail], DiscNs, RamNs, WaitFor, Local) ->
+ Node = Head#commit.node,
+ if
+ Node == node() ->
+ ask_commit(Protocol, Tid, Tail, DiscNs, RamNs, WaitFor, Head);
+ true ->
+ Bin = opt_term_to_binary(Protocol, Head, DiscNs++RamNs),
+ Msg = {ask_commit, Protocol, Tid, Bin, DiscNs, RamNs},
+ {?MODULE, Node} ! {self(), Msg},
+ ask_commit(Protocol, Tid, Tail, DiscNs, RamNs, [Node | WaitFor], Local)
+ end;
+ask_commit(_Protocol, _Tid, [], _DiscNs, _RamNs, WaitFor, Local) ->
+ {WaitFor, Local}.
+
+opt_term_to_binary(asym_trans, Head, Nodes) ->
+ opt_term_to_binary(Nodes, Head);
+opt_term_to_binary(_Protocol, Head, _Nodes) ->
+ Head.
+
+opt_term_to_binary([], Head) ->
+ term_to_binary(Head);
+opt_term_to_binary([H|R], Head) ->
+ case mnesia_monitor:needs_protocol_conversion(H) of
+ true -> Head;
+ false ->
+ opt_term_to_binary(R, Head)
+ end.
+
+rec_all([Node | Tail], Tid, Res, Pids) ->
+ receive
+ {?MODULE, Node, {vote_yes, Tid}} ->
+ rec_all(Tail, Tid, Res, Pids);
+ {?MODULE, Node, {vote_yes, Tid, Pid}} ->
+ rec_all(Tail, Tid, Res, [Pid | Pids]);
+ {?MODULE, Node, {vote_no, Tid, Reason}} ->
+ rec_all(Tail, Tid, {do_abort, Reason}, Pids);
+ {?MODULE, Node, {committed, Tid}} ->
+ rec_all(Tail, Tid, Res, Pids);
+ {?MODULE, Node, {aborted, Tid}} ->
+ rec_all(Tail, Tid, Res, Pids);
+
+ {mnesia_down, Node} ->
+ rec_all(Tail, Tid, {do_abort, {bad_commit, Node}}, Pids)
+ end;
+rec_all([], _Tid, Res, Pids) ->
+ {Res, Pids}.
+
+get_transactions() ->
+ {info, Participant, Coordinator} = req(info),
+ lists:map(fun({Tid, _Tabs}) ->
+ Status = tr_status(Tid,Participant),
+ {Tid#tid.counter, Tid#tid.pid, Status}
+ end,Coordinator).
+
+tr_status(Tid,Participant) ->
+ case lists:keymember(Tid, 1, Participant) of
+ true -> participant;
+ false -> coordinator
+ end.
+
+get_info(Timeout) ->
+ case whereis(?MODULE) of
+ undefined ->
+ {timeout, Timeout};
+ Pid ->
+ Pid ! {self(), info},
+ receive
+ {?MODULE, _, {info, Part, Coord}} ->
+ {info, Part, Coord}
+ after Timeout ->
+ {timeout, Timeout}
+ end
+ end.
+
+display_info(Stream, {timeout, T}) ->
+ io:format(Stream, "---> No info about coordinator and participant transactions, "
+ "timeout ~p <--- ~n", [T]);
+
+display_info(Stream, {info, Part, Coord}) ->
+ io:format(Stream, "---> Participant transactions <--- ~n", []),
+ lists:foreach(fun(P) -> pr_participant(Stream, P) end, Part),
+ io:format(Stream, "---> Coordinator transactions <---~n", []),
+ lists:foreach(fun({Tid, _Tabs}) -> pr_tid(Stream, Tid) end, Coord).
+
+pr_participant(Stream, P) ->
+ Commit0 = P#participant.commit,
+ Commit =
+ if
+ binary(Commit0) -> binary_to_term(Commit0);
+ true -> Commit0
+ end,
+ pr_tid(Stream, P#participant.tid),
+ io:format(Stream, "with participant objects ~p~n", [Commit]).
+
+
+pr_tid(Stream, Tid) ->
+ io:format(Stream, "Tid: ~p (owned by ~p) ~n",
+ [Tid#tid.counter, Tid#tid.pid]).
+
+info(Serial) ->
+ io:format( "Info about transaction with serial == ~p~n", [Serial]),
+ {info, Participant, Trs} = req(info),
+ search_pr_participant(Serial, Participant),
+ search_pr_coordinator(Serial, Trs).
+
+
+search_pr_coordinator(_S, []) -> no;
+search_pr_coordinator(S, [{Tid, _Ts}|Tail]) ->
+ case Tid#tid.counter of
+ S ->
+ io:format( "Tid is coordinator, owner == \n", []),
+ display_pid_info(Tid#tid.pid),
+ search_pr_coordinator(S, Tail);
+ _ ->
+ search_pr_coordinator(S, Tail)
+ end.
+
+search_pr_participant(_S, []) ->
+ false;
+search_pr_participant(S, [ P | Tail]) ->
+ Tid = P#participant.tid,
+ Commit0 = P#participant.commit,
+ if
+ Tid#tid.counter == S ->
+ io:format( "Tid is participant to commit, owner == \n", []),
+ Pid = Tid#tid.pid,
+ display_pid_info(Pid),
+ io:format( "Tid wants to write objects \n",[]),
+ Commit =
+ if
+ binary(Commit0) -> binary_to_term(Commit0);
+ true -> Commit0
+ end,
+
+ io:format("~p~n", [Commit]),
+ search_pr_participant(S,Tail); %% !!!!!
+ true ->
+ search_pr_participant(S, Tail)
+ end.
+
+display_pid_info(Pid) ->
+ case rpc:pinfo(Pid) of
+ undefined ->
+ io:format( "Dead process \n");
+ Info ->
+ Call = fetch(initial_call, Info),
+ Curr = case fetch(current_function, Info) of
+ {Mod,F,Args} when list(Args) ->
+ {Mod,F,length(Args)};
+ Other ->
+ Other
+ end,
+ Reds = fetch(reductions, Info),
+ LM = length(fetch(messages, Info)),
+ pformat(io_lib:format("~p", [Pid]),
+ io_lib:format("~p", [Call]),
+ io_lib:format("~p", [Curr]), Reds, LM)
+ end.
+
+pformat(A1, A2, A3, A4, A5) ->
+ io:format( "~-12s ~-21s ~-21s ~9w ~4w~n", [A1,A2,A3,A4,A5]).
+
+fetch(Key, Info) ->
+ case lists:keysearch(Key, 1, Info) of
+ {value, {_, Val}} ->
+ Val;
+ _ ->
+ 0
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%% reconfigure stuff comes here ......
+%%%%%%%%%%%%%%%%%%%%%
+
+reconfigure_coordinators(N, [{Tid, [Store | _]} | Coordinators]) ->
+ case mnesia_recover:outcome(Tid, unknown) of
+ committed ->
+ WaitingNodes = ?ets_lookup(Store, waiting_for_commit_ack),
+ case lists:keymember(N, 2, WaitingNodes) of
+ false ->
+ ignore; % avoid spurious mnesia_down messages
+ true ->
+ send_mnesia_down(Tid, Store, N)
+ end;
+ aborted ->
+ ignore; % avoid spurious mnesia_down messages
+ _ ->
+ %% Tell the coordinator about the mnesia_down
+ send_mnesia_down(Tid, Store, N)
+ end,
+ reconfigure_coordinators(N, Coordinators);
+reconfigure_coordinators(_N, []) ->
+ ok.
+
+send_mnesia_down(Tid, Store, Node) ->
+ Msg = {mnesia_down, Node},
+ send_to_pids([Tid#tid.pid | get_friends(Store)], Msg).
+
+send_to_pids([Pid | Pids], Msg) ->
+ Pid ! Msg,
+ send_to_pids(Pids, Msg);
+send_to_pids([], _Msg) ->
+ ok.
+
+reconfigure_participants(N, [P | Tail]) ->
+ case lists:member(N, P#participant.disc_nodes) or
+ lists:member(N, P#participant.ram_nodes) of
+ false ->
+ %% Ignore, since we are not a participant
+ %% in the transaction.
+ reconfigure_participants(N, Tail);
+
+ true ->
+ %% We are on a participant node, let's
+ %% check if the dead one was a
+ %% participant or a coordinator.
+ Tid = P#participant.tid,
+ if
+ node(Tid#tid.pid) /= N ->
+ %% Another participant node died. Ignore.
+ reconfigure_participants(N, Tail);
+
+ true ->
+ %% The coordinator node has died and
+ %% we must determine the outcome of the
+ %% transaction and tell mnesia_tm on all
+ %% nodes (including the local node) about it
+ verbose("Coordinator ~p in transaction ~p died~n",
+ [Tid#tid.pid, Tid]),
+
+ Nodes = P#participant.disc_nodes ++
+ P#participant.ram_nodes,
+ AliveNodes = Nodes -- [N],
+ Protocol = P#participant.protocol,
+ tell_outcome(Tid, Protocol, N, AliveNodes, AliveNodes),
+ reconfigure_participants(N, Tail)
+ end
+ end;
+reconfigure_participants(_, []) ->
+ [].
+
+%% We need to determine the outcome of the transaction and
+%% tell mnesia_tm on all involved nodes (including the local node)
+%% about the outcome.
+tell_outcome(Tid, Protocol, Node, CheckNodes, TellNodes) ->
+ Outcome = mnesia_recover:what_happened(Tid, Protocol, CheckNodes),
+ case Outcome of
+ aborted ->
+ rpc:abcast(TellNodes, ?MODULE, {Tid,{do_abort, {mnesia_down, Node}}});
+ committed ->
+ rpc:abcast(TellNodes, ?MODULE, {Tid, do_commit})
+ end,
+ Outcome.
+
+do_stop(#state{coordinators = Coordinators}) ->
+ Msg = {mnesia_down, node()},
+ lists:foreach(fun({Tid, _}) -> Tid#tid.pid ! Msg end, Coordinators),
+ mnesia_checkpoint:stop(),
+ mnesia_log:stop(),
+ exit(shutdown).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, State) ->
+ doit_loop(State).
+
+system_terminate(_Reason, _Parent, _Debug, State) ->
+ do_stop(State).
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
diff --git a/lib/dialyzer/test/race_tests_SUITE.erl b/lib/dialyzer/test/race_tests_SUITE.erl
new file mode 100644
index 0000000000..cfc898d464
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE.erl
@@ -0,0 +1,799 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(race_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([race_tests_SUITE_consistency/1, ets_insert_args1/1,
+ ets_insert_args2/1, ets_insert_args3/1, ets_insert_args4/1,
+ ets_insert_args5/1, ets_insert_args6/1, ets_insert_args7/1,
+ ets_insert_args8/1, ets_insert_control_flow1/1,
+ ets_insert_control_flow2/1, ets_insert_control_flow3/1,
+ ets_insert_control_flow4/1, ets_insert_control_flow5/1,
+ ets_insert_diff_atoms_race1/1, ets_insert_diff_atoms_race2/1,
+ ets_insert_diff_atoms_race3/1, ets_insert_diff_atoms_race4/1,
+ ets_insert_diff_atoms_race5/1, ets_insert_diff_atoms_race6/1,
+ ets_insert_double1/1, ets_insert_double2/1, ets_insert_funs1/1,
+ ets_insert_funs2/1, ets_insert_new/1, ets_insert_param/1,
+ extract_translations/1, mnesia_diff_atoms_race1/1,
+ mnesia_diff_atoms_race2/1, mnesia_dirty_read_one_write_two/1,
+ mnesia_dirty_read_two_write_one/1,
+ mnesia_dirty_read_write_double1/1,
+ mnesia_dirty_read_write_double2/1,
+ mnesia_dirty_read_write_double3/1,
+ mnesia_dirty_read_write_double4/1, mnesia_dirty_read_write_one/1,
+ mnesia_dirty_read_write_two/1, whereis_control_flow1/1,
+ whereis_control_flow2/1, whereis_control_flow3/1,
+ whereis_control_flow4/1, whereis_control_flow5/1,
+ whereis_control_flow6/1, whereis_diff_atoms_no_race/1,
+ whereis_diff_atoms_race/1, whereis_diff_functions1/1,
+ whereis_diff_functions1_nested/1,
+ whereis_diff_functions1_pathsens/1,
+ whereis_diff_functions1_twice/1, whereis_diff_functions2/1,
+ whereis_diff_functions2_nested/1,
+ whereis_diff_functions2_pathsens/1,
+ whereis_diff_functions2_twice/1, whereis_diff_functions3/1,
+ whereis_diff_functions3_nested/1,
+ whereis_diff_functions3_pathsens/1, whereis_diff_functions4/1,
+ whereis_diff_functions5/1, whereis_diff_functions6/1,
+ whereis_diff_modules1/1, whereis_diff_modules1_pathsens/1,
+ whereis_diff_modules1_rec/1, whereis_diff_modules2/1,
+ whereis_diff_modules2_pathsens/1, whereis_diff_modules2_rec/1,
+ whereis_diff_modules3/1, whereis_diff_modules_nested/1,
+ whereis_diff_modules_twice/1, whereis_diff_vars_no_race/1,
+ whereis_diff_vars_race/1, whereis_intra_inter_module1/1,
+ whereis_intra_inter_module2/1, whereis_intra_inter_module3/1,
+ whereis_intra_inter_module4/1, whereis_intra_inter_module5/1,
+ whereis_intra_inter_module6/1, whereis_intra_inter_module7/1,
+ whereis_intra_inter_module8/1, whereis_param/1,
+ whereis_param_inter_module/1, whereis_rec_function1/1,
+ whereis_rec_function2/1, whereis_rec_function3/1,
+ whereis_rec_function4/1, whereis_rec_function5/1,
+ whereis_rec_function6/1, whereis_rec_function7/1,
+ whereis_rec_function8/1, whereis_try_catch/1, whereis_vars1/1,
+ whereis_vars10/1, whereis_vars11/1, whereis_vars12/1,
+ whereis_vars13/1, whereis_vars14/1, whereis_vars15/1,
+ whereis_vars16/1, whereis_vars17/1, whereis_vars18/1,
+ whereis_vars19/1, whereis_vars2/1, whereis_vars20/1,
+ whereis_vars21/1, whereis_vars22/1, whereis_vars3/1,
+ whereis_vars4/1, whereis_vars5/1, whereis_vars6/1,
+ whereis_vars7/1, whereis_vars8/1, whereis_vars9/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, [{warnings,[race_conditions]}]}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [race_tests_SUITE_consistency,ets_insert_args1,ets_insert_args2,
+ ets_insert_args3,ets_insert_args4,ets_insert_args5,ets_insert_args6,
+ ets_insert_args7,ets_insert_args8,ets_insert_control_flow1,
+ ets_insert_control_flow2,ets_insert_control_flow3,ets_insert_control_flow4,
+ ets_insert_control_flow5,ets_insert_diff_atoms_race1,
+ ets_insert_diff_atoms_race2,ets_insert_diff_atoms_race3,
+ ets_insert_diff_atoms_race4,ets_insert_diff_atoms_race5,
+ ets_insert_diff_atoms_race6,ets_insert_double1,ets_insert_double2,
+ ets_insert_funs1,ets_insert_funs2,ets_insert_new,ets_insert_param,
+ extract_translations,mnesia_diff_atoms_race1,mnesia_diff_atoms_race2,
+ mnesia_dirty_read_one_write_two,mnesia_dirty_read_two_write_one,
+ mnesia_dirty_read_write_double1,mnesia_dirty_read_write_double2,
+ mnesia_dirty_read_write_double3,mnesia_dirty_read_write_double4,
+ mnesia_dirty_read_write_one,mnesia_dirty_read_write_two,
+ whereis_control_flow1,whereis_control_flow2,whereis_control_flow3,
+ whereis_control_flow4,whereis_control_flow5,whereis_control_flow6,
+ whereis_diff_atoms_no_race,whereis_diff_atoms_race,whereis_diff_functions1,
+ whereis_diff_functions1_nested,whereis_diff_functions1_pathsens,
+ whereis_diff_functions1_twice,whereis_diff_functions2,
+ whereis_diff_functions2_nested,whereis_diff_functions2_pathsens,
+ whereis_diff_functions2_twice,whereis_diff_functions3,
+ whereis_diff_functions3_nested,whereis_diff_functions3_pathsens,
+ whereis_diff_functions4,whereis_diff_functions5,whereis_diff_functions6,
+ whereis_diff_modules1,whereis_diff_modules1_pathsens,
+ whereis_diff_modules1_rec,whereis_diff_modules2,
+ whereis_diff_modules2_pathsens,whereis_diff_modules2_rec,
+ whereis_diff_modules3,whereis_diff_modules_nested,
+ whereis_diff_modules_twice,whereis_diff_vars_no_race,
+ whereis_diff_vars_race,whereis_intra_inter_module1,
+ whereis_intra_inter_module2,whereis_intra_inter_module3,
+ whereis_intra_inter_module4,whereis_intra_inter_module5,
+ whereis_intra_inter_module6,whereis_intra_inter_module7,
+ whereis_intra_inter_module8,whereis_param,whereis_param_inter_module,
+ whereis_rec_function1,whereis_rec_function2,whereis_rec_function3,
+ whereis_rec_function4,whereis_rec_function5,whereis_rec_function6,
+ whereis_rec_function7,whereis_rec_function8,whereis_try_catch,
+ whereis_vars1,whereis_vars10,whereis_vars11,whereis_vars12,whereis_vars13,
+ whereis_vars14,whereis_vars15,whereis_vars16,whereis_vars17,whereis_vars18,
+ whereis_vars19,whereis_vars2,whereis_vars20,whereis_vars21,whereis_vars22,
+ whereis_vars3,whereis_vars4,whereis_vars5,whereis_vars6,whereis_vars7,
+ whereis_vars8,whereis_vars9].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+race_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+ets_insert_args1(Config) ->
+ case dialyze(Config, ets_insert_args1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args2(Config) ->
+ case dialyze(Config, ets_insert_args2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args3(Config) ->
+ case dialyze(Config, ets_insert_args3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args4(Config) ->
+ case dialyze(Config, ets_insert_args4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args5(Config) ->
+ case dialyze(Config, ets_insert_args5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args6(Config) ->
+ case dialyze(Config, ets_insert_args6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args7(Config) ->
+ case dialyze(Config, ets_insert_args7) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_args8(Config) ->
+ case dialyze(Config, ets_insert_args8) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_control_flow1(Config) ->
+ case dialyze(Config, ets_insert_control_flow1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_control_flow2(Config) ->
+ case dialyze(Config, ets_insert_control_flow2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_control_flow3(Config) ->
+ case dialyze(Config, ets_insert_control_flow3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_control_flow4(Config) ->
+ case dialyze(Config, ets_insert_control_flow4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_control_flow5(Config) ->
+ case dialyze(Config, ets_insert_control_flow5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_diff_atoms_race1(Config) ->
+ case dialyze(Config, ets_insert_diff_atoms_race1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_diff_atoms_race2(Config) ->
+ case dialyze(Config, ets_insert_diff_atoms_race2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_diff_atoms_race3(Config) ->
+ case dialyze(Config, ets_insert_diff_atoms_race3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_diff_atoms_race4(Config) ->
+ case dialyze(Config, ets_insert_diff_atoms_race4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_diff_atoms_race5(Config) ->
+ case dialyze(Config, ets_insert_diff_atoms_race5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_diff_atoms_race6(Config) ->
+ case dialyze(Config, ets_insert_diff_atoms_race6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_double1(Config) ->
+ case dialyze(Config, ets_insert_double1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_double2(Config) ->
+ case dialyze(Config, ets_insert_double2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_funs1(Config) ->
+ case dialyze(Config, ets_insert_funs1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_funs2(Config) ->
+ case dialyze(Config, ets_insert_funs2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_new(Config) ->
+ case dialyze(Config, ets_insert_new) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_insert_param(Config) ->
+ case dialyze(Config, ets_insert_param) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+extract_translations(Config) ->
+ case dialyze(Config, extract_translations) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_diff_atoms_race1(Config) ->
+ case dialyze(Config, mnesia_diff_atoms_race1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_diff_atoms_race2(Config) ->
+ case dialyze(Config, mnesia_diff_atoms_race2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_one_write_two(Config) ->
+ case dialyze(Config, mnesia_dirty_read_one_write_two) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_two_write_one(Config) ->
+ case dialyze(Config, mnesia_dirty_read_two_write_one) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_write_double1(Config) ->
+ case dialyze(Config, mnesia_dirty_read_write_double1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_write_double2(Config) ->
+ case dialyze(Config, mnesia_dirty_read_write_double2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_write_double3(Config) ->
+ case dialyze(Config, mnesia_dirty_read_write_double3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_write_double4(Config) ->
+ case dialyze(Config, mnesia_dirty_read_write_double4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_write_one(Config) ->
+ case dialyze(Config, mnesia_dirty_read_write_one) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mnesia_dirty_read_write_two(Config) ->
+ case dialyze(Config, mnesia_dirty_read_write_two) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_control_flow1(Config) ->
+ case dialyze(Config, whereis_control_flow1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_control_flow2(Config) ->
+ case dialyze(Config, whereis_control_flow2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_control_flow3(Config) ->
+ case dialyze(Config, whereis_control_flow3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_control_flow4(Config) ->
+ case dialyze(Config, whereis_control_flow4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_control_flow5(Config) ->
+ case dialyze(Config, whereis_control_flow5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_control_flow6(Config) ->
+ case dialyze(Config, whereis_control_flow6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_atoms_no_race(Config) ->
+ case dialyze(Config, whereis_diff_atoms_no_race) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_atoms_race(Config) ->
+ case dialyze(Config, whereis_diff_atoms_race) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions1(Config) ->
+ case dialyze(Config, whereis_diff_functions1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions1_nested(Config) ->
+ case dialyze(Config, whereis_diff_functions1_nested) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions1_pathsens(Config) ->
+ case dialyze(Config, whereis_diff_functions1_pathsens) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions1_twice(Config) ->
+ case dialyze(Config, whereis_diff_functions1_twice) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions2(Config) ->
+ case dialyze(Config, whereis_diff_functions2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions2_nested(Config) ->
+ case dialyze(Config, whereis_diff_functions2_nested) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions2_pathsens(Config) ->
+ case dialyze(Config, whereis_diff_functions2_pathsens) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions2_twice(Config) ->
+ case dialyze(Config, whereis_diff_functions2_twice) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions3(Config) ->
+ case dialyze(Config, whereis_diff_functions3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions3_nested(Config) ->
+ case dialyze(Config, whereis_diff_functions3_nested) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions3_pathsens(Config) ->
+ case dialyze(Config, whereis_diff_functions3_pathsens) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions4(Config) ->
+ case dialyze(Config, whereis_diff_functions4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions5(Config) ->
+ case dialyze(Config, whereis_diff_functions5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_functions6(Config) ->
+ case dialyze(Config, whereis_diff_functions6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules1(Config) ->
+ case dialyze(Config, whereis_diff_modules1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules1_pathsens(Config) ->
+ case dialyze(Config, whereis_diff_modules1_pathsens) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules1_rec(Config) ->
+ case dialyze(Config, whereis_diff_modules1_rec) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules2(Config) ->
+ case dialyze(Config, whereis_diff_modules2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules2_pathsens(Config) ->
+ case dialyze(Config, whereis_diff_modules2_pathsens) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules2_rec(Config) ->
+ case dialyze(Config, whereis_diff_modules2_rec) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules3(Config) ->
+ case dialyze(Config, whereis_diff_modules3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules_nested(Config) ->
+ case dialyze(Config, whereis_diff_modules_nested) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_modules_twice(Config) ->
+ case dialyze(Config, whereis_diff_modules_twice) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_vars_no_race(Config) ->
+ case dialyze(Config, whereis_diff_vars_no_race) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_diff_vars_race(Config) ->
+ case dialyze(Config, whereis_diff_vars_race) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module1(Config) ->
+ case dialyze(Config, whereis_intra_inter_module1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module2(Config) ->
+ case dialyze(Config, whereis_intra_inter_module2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module3(Config) ->
+ case dialyze(Config, whereis_intra_inter_module3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module4(Config) ->
+ case dialyze(Config, whereis_intra_inter_module4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module5(Config) ->
+ case dialyze(Config, whereis_intra_inter_module5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module6(Config) ->
+ case dialyze(Config, whereis_intra_inter_module6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module7(Config) ->
+ case dialyze(Config, whereis_intra_inter_module7) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_intra_inter_module8(Config) ->
+ case dialyze(Config, whereis_intra_inter_module8) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_param(Config) ->
+ case dialyze(Config, whereis_param) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_param_inter_module(Config) ->
+ case dialyze(Config, whereis_param_inter_module) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function1(Config) ->
+ case dialyze(Config, whereis_rec_function1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function2(Config) ->
+ case dialyze(Config, whereis_rec_function2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function3(Config) ->
+ case dialyze(Config, whereis_rec_function3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function4(Config) ->
+ case dialyze(Config, whereis_rec_function4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function5(Config) ->
+ case dialyze(Config, whereis_rec_function5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function6(Config) ->
+ case dialyze(Config, whereis_rec_function6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function7(Config) ->
+ case dialyze(Config, whereis_rec_function7) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_rec_function8(Config) ->
+ case dialyze(Config, whereis_rec_function8) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_try_catch(Config) ->
+ case dialyze(Config, whereis_try_catch) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars1(Config) ->
+ case dialyze(Config, whereis_vars1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars10(Config) ->
+ case dialyze(Config, whereis_vars10) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars11(Config) ->
+ case dialyze(Config, whereis_vars11) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars12(Config) ->
+ case dialyze(Config, whereis_vars12) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars13(Config) ->
+ case dialyze(Config, whereis_vars13) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars14(Config) ->
+ case dialyze(Config, whereis_vars14) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars15(Config) ->
+ case dialyze(Config, whereis_vars15) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars16(Config) ->
+ case dialyze(Config, whereis_vars16) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars17(Config) ->
+ case dialyze(Config, whereis_vars17) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars18(Config) ->
+ case dialyze(Config, whereis_vars18) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars19(Config) ->
+ case dialyze(Config, whereis_vars19) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars2(Config) ->
+ case dialyze(Config, whereis_vars2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars20(Config) ->
+ case dialyze(Config, whereis_vars20) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars21(Config) ->
+ case dialyze(Config, whereis_vars21) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars22(Config) ->
+ case dialyze(Config, whereis_vars22) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars3(Config) ->
+ case dialyze(Config, whereis_vars3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars4(Config) ->
+ case dialyze(Config, whereis_vars4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars5(Config) ->
+ case dialyze(Config, whereis_vars5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars6(Config) ->
+ case dialyze(Config, whereis_vars6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars7(Config) ->
+ case dialyze(Config, whereis_vars7) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars8(Config) ->
+ case dialyze(Config, whereis_vars8) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+whereis_vars9(Config) ->
+ case dialyze(Config, whereis_vars9) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/race_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..44e1720715
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/dialyzer_options
@@ -0,0 +1 @@
+{dialyzer_options, [{warnings, [race_conditions]}]}.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args1 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args1
new file mode 100644
index 0000000000..3bbe99d4af
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args1
@@ -0,0 +1,2 @@
+
+ets_insert_args1.erl:9: The call ets:insert(T::'foo',[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(T::'foo','counter') call in ets_insert_args1.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args2 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args2
new file mode 100644
index 0000000000..34176c66ac
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args2
@@ -0,0 +1,2 @@
+
+ets_insert_args2.erl:9: The call ets:insert(T::'foo',[{'counter',number()} | {'kostis',number()} | {'maria',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(T::'foo','counter') call in ets_insert_args2.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args3 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args3
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args3
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args4 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args4
new file mode 100644
index 0000000000..8c45de08c2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args4
@@ -0,0 +1,2 @@
+
+ets_insert_args4.erl:9: The call ets:insert(T::'foo',{'counter',number()}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(T::'foo','counter') call in ets_insert_args4.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args5 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args5
new file mode 100644
index 0000000000..a4a0c021c2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args5
@@ -0,0 +1,2 @@
+
+ets_insert_args5.erl:9: The call ets:insert(T::'foo',{'counter',number(),number()}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(T::'foo','counter') call in ets_insert_args5.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args6 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args6
new file mode 100644
index 0000000000..10fa4c27e3
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args6
@@ -0,0 +1,2 @@
+
+ets_insert_args6.erl:9: The call ets:insert(T::'foo',[{'counter',number(),number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(T::'foo','counter') call in ets_insert_args6.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args7 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args7
new file mode 100644
index 0000000000..af43145c17
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args7
@@ -0,0 +1,2 @@
+
+ets_insert_args7.erl:17: The call ets:insert(Table::'foo',[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo','counter') call in ets_insert_args7.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args8 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args8
new file mode 100644
index 0000000000..5a2b41ed8c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_args8
@@ -0,0 +1,2 @@
+
+ets_insert_args8.erl:16: The call ets:insert(Table::atom(),[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::atom(),'counter') call in ets_insert_args8.erl on line 12
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow1 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow1
new file mode 100644
index 0000000000..d7df214939
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow1
@@ -0,0 +1,2 @@
+
+ets_insert_control_flow1.erl:15: The call ets:insert('foo',{'random',integer()}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','random') call in ets_insert_control_flow1.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow2 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow2
new file mode 100644
index 0000000000..cdaeafb0ed
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow2
@@ -0,0 +1,3 @@
+
+ets_insert_control_flow2.erl:15: The call ets:insert('foo',[{'pass',[pos_integer()]} | {'random',integer()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','random') call in ets_insert_control_flow2.erl on line 10
+ets_insert_control_flow2.erl:19: The call ets:insert('foo',[{'pass',[pos_integer()]} | {'random',integer()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','random') call in ets_insert_control_flow2.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow3 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow3
new file mode 100644
index 0000000000..d640f564cd
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow3
@@ -0,0 +1,3 @@
+
+ets_insert_control_flow3.erl:21: The call ets:insert(Table::atom() | tid(),{'root',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'root') call in ets_insert_control_flow3.erl on line 12
+ets_insert_control_flow3.erl:23: The call ets:insert(Table::atom() | tid(),{'user',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'user') call in ets_insert_control_flow3.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow4 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow4
new file mode 100644
index 0000000000..6f34e75902
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow4
@@ -0,0 +1,3 @@
+
+ets_insert_control_flow4.erl:21: The call ets:insert(Table::atom() | tid(),{'pass',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow4.erl on line 12, the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow4.erl on line 13
+ets_insert_control_flow4.erl:23: The call ets:insert(Table::atom() | tid(),{'pass',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow4.erl on line 12, the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow4.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow5 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow5
new file mode 100644
index 0000000000..5af592f43f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_control_flow5
@@ -0,0 +1,5 @@
+
+ets_insert_control_flow5.erl:22: The call ets:insert(Table::atom() | tid(),{'welcome_msg',[any(),...]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'welcome_msg') call in ets_insert_control_flow5.erl on line 16
+ets_insert_control_flow5.erl:23: The call ets:insert(Table::atom() | tid(),{'pass',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow5.erl on line 12, the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow5.erl on line 13
+ets_insert_control_flow5.erl:25: The call ets:insert(Table::atom() | tid(),{'welcome_msg',[any(),...]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'welcome_msg') call in ets_insert_control_flow5.erl on line 16
+ets_insert_control_flow5.erl:26: The call ets:insert(Table::atom() | tid(),{'pass',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow5.erl on line 12, the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_control_flow5.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race1 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race1
new file mode 100644
index 0000000000..98ccf34e7d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race1
@@ -0,0 +1,2 @@
+
+ets_insert_diff_atoms_race1.erl:22: The call ets:insert(Table::'bar' | 'foo',[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo','counter') call in ets_insert_diff_atoms_race1.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race2 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race2
new file mode 100644
index 0000000000..b6af99b4cc
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race2
@@ -0,0 +1,2 @@
+
+ets_insert_diff_atoms_race2.erl:22: The call ets:insert(Table::'bar' | 'foo',[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo',Counter::'counter') call in ets_insert_diff_atoms_race2.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race3 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race3
new file mode 100644
index 0000000000..d79182c289
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race3
@@ -0,0 +1,2 @@
+
+ets_insert_diff_atoms_race3.erl:22: The call ets:insert(Table::'bar' | 'foo',{'counter',number()}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo','counter') call in ets_insert_diff_atoms_race3.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race4 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race4
new file mode 100644
index 0000000000..5bb1b9f781
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race4
@@ -0,0 +1,2 @@
+
+ets_insert_diff_atoms_race4.erl:22: The call ets:insert(Table::'bar' | 'foo',{'counter',number()}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo',Counter::'counter') call in ets_insert_diff_atoms_race4.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race5 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race5
new file mode 100644
index 0000000000..7db320e758
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race5
@@ -0,0 +1,2 @@
+
+ets_insert_diff_atoms_race5.erl:22: The call ets:insert(Table::'foo',[{'counter',number()} | {'index',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo',Counter::'counter') call in ets_insert_diff_atoms_race5.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race6 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race6
new file mode 100644
index 0000000000..c029f79ed5
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_diff_atoms_race6
@@ -0,0 +1,2 @@
+
+ets_insert_diff_atoms_race6.erl:22: The call ets:insert(Table::'foo',{'counter',number()} | {'index',number()}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Tab::'foo',Counter::'counter') call in ets_insert_diff_atoms_race6.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double1 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double1
new file mode 100644
index 0000000000..b640b91271
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double1
@@ -0,0 +1,4 @@
+
+ets_insert_double1.erl:15: The call ets:insert('foo',[{'pass',[number()]} | {'random',integer()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','random') call in ets_insert_double1.erl on line 10, the ets:lookup('foo','pass') call in ets_insert_double1.erl on line 27
+ets_insert_double1.erl:19: The call ets:insert('foo',[{'pass',[number()]} | {'random',integer()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','random') call in ets_insert_double1.erl on line 10, the ets:lookup('foo','pass') call in ets_insert_double1.erl on line 27
+ets_insert_double1.erl:24: The call ets:insert('foo',{'pass','empty'}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','pass') call in ets_insert_double1.erl on line 22
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double2 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double2
new file mode 100644
index 0000000000..cf61cb5ec3
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_double2
@@ -0,0 +1,4 @@
+
+ets_insert_double2.erl:15: The call ets:insert('foo',[{_,[number()] | integer()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo',Random::any()) call in ets_insert_double2.erl on line 10, the ets:lookup('foo',Pass::any()) call in ets_insert_double2.erl on line 27
+ets_insert_double2.erl:19: The call ets:insert('foo',[{_,[number()] | integer()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo',Random::any()) call in ets_insert_double2.erl on line 10, the ets:lookup('foo',Pass::any()) call in ets_insert_double2.erl on line 27
+ets_insert_double2.erl:24: The call ets:insert('foo',{_,'empty'}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo',Pass::any()) call in ets_insert_double2.erl on line 22
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs1 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs1
new file mode 100644
index 0000000000..540a0cf388
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs1
@@ -0,0 +1,2 @@
+
+ets_insert_funs1.erl:15: The call ets:insert('foo',[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(T::'foo','counter') call in ets_insert_funs1.erl on line 9
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs2 b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs2
new file mode 100644
index 0000000000..6b618f72b6
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_funs2
@@ -0,0 +1,2 @@
+
+ets_insert_funs2.erl:9: The call ets:insert(T::'foo',[{'counter',number()},...]) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('foo','counter') call in ets_insert_funs2.erl on line 14
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_new b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_new
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_new
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_param b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_param
new file mode 100644
index 0000000000..58f934a190
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/ets_insert_param
@@ -0,0 +1,5 @@
+
+ets_insert_param.erl:13: The call ets:insert(Table::atom() | tid(),{'welcome_msg',[any(),...]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'welcome_msg') call in ets_insert_param.erl on line 10
+ets_insert_param.erl:14: The call ets:insert(Table::atom() | tid(),{'pass',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_param.erl on line 14, the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_param.erl on line 15
+ets_insert_param.erl:17: The call ets:insert(Table::atom() | tid(),{'welcome_msg',[any(),...]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'welcome_msg') call in ets_insert_param.erl on line 10
+ets_insert_param.erl:18: The call ets:insert(Table::atom() | tid(),{'pass',[pos_integer()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup(Table::atom() | tid(),'pass') call in ets_insert_param.erl on line 18
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/extract_translations b/lib/dialyzer/test/race_tests_SUITE_data/results/extract_translations
new file mode 100644
index 0000000000..f7d5abc6f5
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/extract_translations
@@ -0,0 +1,5 @@
+
+extract_translations.erl:140: The call ets:insert('files',{atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('files',File::atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]) call in extract_translations.erl on line 135
+extract_translations.erl:146: The call ets:insert('translations',{_,[]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('translations',Str::any()) call in extract_translations.erl on line 126
+extract_translations.erl:152: The call ets:insert('files',{atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('files',File::atom() | binary() | [atom() | binary() | [atom() | binary() | [any()] | char()] | char()]) call in extract_translations.erl on line 148
+extract_translations.erl:154: The call ets:insert('translations',{_,[]}) might have an unintended effect due to a possible race condition caused by its combination with the ets:lookup('translations',Str::any()) call in extract_translations.erl on line 126
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race1 b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race1
new file mode 100644
index 0000000000..f5e544dc2a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race1
@@ -0,0 +1,2 @@
+
+mnesia_diff_atoms_race1.erl:33: The call mnesia:dirty_write(Table::'employee' | 'employer',Record::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read(Tab::'employee',Eno::any()) call in mnesia_diff_atoms_race1.erl on line 19
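
The mnesia_* results describe the analogous pattern for dirty table access: a record read with mnesia:dirty_read and written back with mnesia:dirty_write is not protected by a transaction. A hedged sketch of such code (record and field names are illustrative only, not the suite's module; it assumes a running mnesia node with an 'employee' table):

-module(mnesia_race_sketch).
-export([raise/2]).

-record(employee, {emp_no, name, salary}).

%% Dialyzer warns that this dirty_read/dirty_write combination is
%% not atomic: another process may rewrite the same record between
%% the two calls. Wrapping both in mnesia:transaction/1 avoids it.
raise(Eno, Amount) ->
    [E] = mnesia:dirty_read({employee, Eno}),
    New = E#employee{salary = E#employee.salary + Amount},
    mnesia:dirty_write(New).
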
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race2 b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race2
new file mode 100644
index 0000000000..0ad0bc0afd
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_diff_atoms_race2
@@ -0,0 +1,2 @@
+
+mnesia_diff_atoms_race2.erl:37: The call mnesia:dirty_write(Record::#employee{salary::number()} | #employer{}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read(Tab::'employee',Eno::any()) call in mnesia_diff_atoms_race2.erl on line 26
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_one_write_two b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_one_write_two
new file mode 100644
index 0000000000..a4f3c269f1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_one_write_two
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_one_write_two.erl:20: The call mnesia:dirty_write('employee',New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read({'employee',_}) call in mnesia_dirty_read_one_write_two.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_two_write_one b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_two_write_one
new file mode 100644
index 0000000000..6e666d755f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_two_write_one
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_two_write_one.erl:20: The call mnesia:dirty_write(New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read('employee',Eno::any()) call in mnesia_dirty_read_two_write_one.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double1 b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double1
new file mode 100644
index 0000000000..e953c6948b
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double1
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_write_double1.erl:20: The call mnesia:dirty_write('employee',New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read('employee',Eno::any()) call in mnesia_dirty_read_write_double1.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double2 b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double2
new file mode 100644
index 0000000000..2a0b4eddd0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double2
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_write_double2.erl:20: The call mnesia:dirty_write(New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read({'employee',_}) call in mnesia_dirty_read_write_double2.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double3 b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double3
new file mode 100644
index 0000000000..fe51a5e838
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double3
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_write_double3.erl:20: The call mnesia:dirty_write('employee',New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read({'employee',_}) call in mnesia_dirty_read_write_double3.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double4 b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double4
new file mode 100644
index 0000000000..d6a60d847a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_double4
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_write_double4.erl:20: The call mnesia:dirty_write(New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read('employee',Eno::any()) call in mnesia_dirty_read_write_double4.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_one b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_one
new file mode 100644
index 0000000000..b47f66eb79
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_one
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_write_one.erl:20: The call mnesia:dirty_write(New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read({'employee',_}) call in mnesia_dirty_read_write_one.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_two b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_two
new file mode 100644
index 0000000000..2faf55fe72
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/mnesia_dirty_read_write_two
@@ -0,0 +1,2 @@
+
+mnesia_dirty_read_write_two.erl:20: The call mnesia:dirty_write('employee',New::#employee{salary::number()}) might have an unintended effect due to a possible race condition caused by its combination with the mnesia:dirty_read('employee',Eno::any()) call in mnesia_dirty_read_write_two.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow1 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow1
new file mode 100644
index 0000000000..0fcf13c50a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow1
@@ -0,0 +1,2 @@
+
+whereis_control_flow1.erl:13: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_control_flow1.erl on line 8
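
The whereis_* results cover the process-registry variant of the same race: checking a name with erlang:whereis/1 and then calling erlang:register/2 (or erlang:unregister/1) can fail if another process registers the name in between. A hypothetical sketch, not the suite's whereis_control_flow1.erl source:

-module(whereis_race_sketch).
-export([start/1]).

%% Dialyzer warns that register/2 may fail with badarg: another
%% process can register AnAtom after the whereis/1 check but
%% before this call.
start(AnAtom) when is_atom(AnAtom) ->
    case whereis(AnAtom) of
        undefined ->
            Pid = spawn(fun() -> receive stop -> ok end end),
            register(AnAtom, Pid),
            {ok, Pid};
        Pid when is_pid(Pid) ->
            {already_registered, Pid}
    end.
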
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow2 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow2
new file mode 100644
index 0000000000..d0c048701d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow2
@@ -0,0 +1,3 @@
+
+whereis_control_flow2.erl:14: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_control_flow2.erl on line 8
+whereis_control_flow2.erl:15: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_control_flow2.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow3 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow3
new file mode 100644
index 0000000000..0d93428758
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow3
@@ -0,0 +1,2 @@
+
+whereis_control_flow3.erl:25: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_control_flow3.erl on line 11, the erlang:whereis(AnAtom::any()) call in whereis_control_flow3.erl on line 18
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow4 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow4
new file mode 100644
index 0000000000..f0ce12d0a4
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow4
@@ -0,0 +1,3 @@
+
+whereis_control_flow4.erl:18: The call erlang:register('maria',Pid1::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis('maria') call in whereis_control_flow4.erl on line 8
+whereis_control_flow4.erl:19: The call erlang:register('kostis',Pid2::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis('kostis') call in whereis_control_flow4.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow5 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow5
new file mode 100644
index 0000000000..fd809139e4
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow5
@@ -0,0 +1,2 @@
+
+whereis_control_flow5.erl:11: The call erlang:unregister(AnAtom::atom()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_control_flow5.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow6 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow6
new file mode 100644
index 0000000000..ba89cc5624
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_control_flow6
@@ -0,0 +1,2 @@
+
+whereis_control_flow6.erl:11: The call erlang:unregister('kostis') might fail due to a possible race condition caused by its combination with the erlang:whereis('kostis') call in whereis_control_flow6.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_no_race b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_no_race
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_no_race
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_race b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_race
new file mode 100644
index 0000000000..76c746e2f4
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_atoms_race
@@ -0,0 +1,2 @@
+
+whereis_diff_atoms_race.erl:34: The call erlang:register(Atom::'kostis' | 'maria',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::'maria') call in whereis_diff_atoms_race.erl on line 14
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1
new file mode 100644
index 0000000000..14c157885f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1
@@ -0,0 +1,3 @@
+
+whereis_diff_functions1.erl:10: The call erlang:register('master',pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis('master') call in whereis_diff_functions1.erl on line 8
+whereis_diff_functions1.erl:18: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions1.erl on line 15
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_nested b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_nested
new file mode 100644
index 0000000000..c791d4b347
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_nested
@@ -0,0 +1,2 @@
+
+whereis_diff_functions1_nested.erl:23: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions1_nested.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_pathsens b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_pathsens
new file mode 100644
index 0000000000..d22e696196
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_pathsens
@@ -0,0 +1,2 @@
+
+whereis_diff_functions1_pathsens.erl:32: The call erlang:register(Atom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::'kostis') call in whereis_diff_functions1_pathsens.erl on line 15, the erlang:whereis(AnAtom::'kostis') call in whereis_diff_functions1_pathsens.erl on line 22
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_twice b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_twice
new file mode 100644
index 0000000000..3024c77d91
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions1_twice
@@ -0,0 +1,3 @@
+
+whereis_diff_functions1_twice.erl:27: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions1_twice.erl on line 11
+whereis_diff_functions1_twice.erl:30: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::atom()) call in whereis_diff_functions1_twice.erl on line 15
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2
new file mode 100644
index 0000000000..9a22eb7e17
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2
@@ -0,0 +1,2 @@
+
+whereis_diff_functions2.erl:25: The call erlang:register(Atom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::'kostis') call in whereis_diff_functions2.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_nested b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_nested
new file mode 100644
index 0000000000..0e757fbccc
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_nested
@@ -0,0 +1,2 @@
+
+whereis_diff_functions2_nested.erl:20: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions2_nested.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_pathsens b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_pathsens
new file mode 100644
index 0000000000..c102b39243
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_pathsens
@@ -0,0 +1,2 @@
+
+whereis_diff_functions2_pathsens.erl:29: The call erlang:register(Atom::atom(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions2_pathsens.erl on line 12, the erlang:whereis(AnAtom::any()) call in whereis_diff_functions2_pathsens.erl on line 19
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_twice b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_twice
new file mode 100644
index 0000000000..b048bc6bed
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions2_twice
@@ -0,0 +1,3 @@
+
+whereis_diff_functions2_twice.erl:24: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions2_twice.erl on line 8
+whereis_diff_functions2_twice.erl:27: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::atom()) call in whereis_diff_functions2_twice.erl on line 12
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3
new file mode 100644
index 0000000000..6d5154b411
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3
@@ -0,0 +1,2 @@
+
+whereis_diff_functions3.erl:8: The call erlang:register(AnAtom::atom(),'undefined' | pid() | port()) might fail due to a possible race condition caused by its combination with the erlang:whereis(Atom::any()) call in whereis_diff_functions3.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_nested b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_nested
new file mode 100644
index 0000000000..298c4c7178
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_nested
@@ -0,0 +1,2 @@
+
+whereis_diff_functions3_nested.erl:21: The call erlang:unregister(Atom::atom()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions3_nested.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_pathsens b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_pathsens
new file mode 100644
index 0000000000..5d1ea5bda5
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions3_pathsens
@@ -0,0 +1,2 @@
+
+whereis_diff_functions3_pathsens.erl:29: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions3_pathsens.erl on line 12, the erlang:whereis(AnAtom::any()) call in whereis_diff_functions3_pathsens.erl on line 19
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions4 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions4
new file mode 100644
index 0000000000..cb51301f1e
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions4
@@ -0,0 +1,2 @@
+
+whereis_diff_functions4.erl:32: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions4.erl on line 13, the erlang:whereis(AnAtom::atom()) call in whereis_diff_functions4.erl on line 17
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions5 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions5
new file mode 100644
index 0000000000..34c477e05a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions5
@@ -0,0 +1,2 @@
+
+whereis_diff_functions5.erl:22: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions5.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions6 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions6
new file mode 100644
index 0000000000..8840ef4ca7
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_functions6
@@ -0,0 +1,2 @@
+
+whereis_diff_functions6.erl:29: The call erlang:register(Atom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_functions6.erl on line 10, the erlang:whereis(AnAtom::atom()) call in whereis_diff_functions6.erl on line 14
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1
new file mode 100644
index 0000000000..8f7d0b7a17
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1
@@ -0,0 +1,2 @@
+
+whereis_diff_modules2.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_modules1.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_pathsens b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_pathsens
new file mode 100644
index 0000000000..40d36eb7d2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_pathsens
@@ -0,0 +1,2 @@
+
+whereis_diff_modules2_pathsens.erl:12: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_modules1_pathsens.erl on line 12, the erlang:whereis(AnAtom::any()) call in whereis_diff_modules1_pathsens.erl on line 19
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_rec b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_rec
new file mode 100644
index 0000000000..278b679aba
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules1_rec
@@ -0,0 +1,2 @@
+
+whereis_diff_modules1_rec.erl:10: The call erlang:register(AnAtom::any(),Id::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_diff_modules1_rec.erl on line 12
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2
new file mode 100644
index 0000000000..a4e5a000e2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2
@@ -0,0 +1,2 @@
+
+whereis_diff_modules3.erl:8: The call erlang:register(AnAtom::atom(),'undefined' | pid() | port()) might fail due to a possible race condition caused by its combination with the erlang:whereis(Atom::any()) call in whereis_diff_modules4.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_pathsens b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_pathsens
new file mode 100644
index 0000000000..cc93133019
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_pathsens
@@ -0,0 +1,2 @@
+
+whereis_diff_modules4_pathsens.erl:13: The call erlang:register(Atom::atom(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_modules3_pathsens.erl on line 12, the erlang:whereis(AnAtom::any()) call in whereis_diff_modules3_pathsens.erl on line 19
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_rec b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_rec
new file mode 100644
index 0000000000..8874ab3553
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules2_rec
@@ -0,0 +1,2 @@
+
+whereis_diff_modules3_rec.erl:13: The call erlang:register(AnAtom::any(),Id::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_diff_modules3_rec.erl on line 15
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules3 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules3
new file mode 100644
index 0000000000..8e839a53dc
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules3
@@ -0,0 +1,2 @@
+
+whereis_diff_modules6.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_modules5.erl on line 10, the erlang:whereis(AnAtom::atom()) call in whereis_diff_modules5.erl on line 14
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_nested b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_nested
new file mode 100644
index 0000000000..9192dc0708
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_nested
@@ -0,0 +1,2 @@
+
+whereis_diff_modules3_nested.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_modules1_nested.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_twice b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_twice
new file mode 100644
index 0000000000..3758347255
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_modules_twice
@@ -0,0 +1,3 @@
+
+whereis_diff_modules2_twice.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::atom()) call in whereis_diff_modules1_twice.erl on line 12
+whereis_diff_modules2_twice.erl:8: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_diff_modules1_twice.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_no_race b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_no_race
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_no_race
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_race b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_race
new file mode 100644
index 0000000000..e34b4d2138
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_diff_vars_race
@@ -0,0 +1,2 @@
+
+whereis_diff_vars_race.erl:16: The call erlang:register(Atom2::any(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(Atom1::any()) call in whereis_diff_vars_race.erl on line 13
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module1 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module1
new file mode 100644
index 0000000000..3ed6f50d8d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module1
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module2.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module1.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module2 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module2
new file mode 100644
index 0000000000..737054fe67
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module2
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module4.erl:14: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module3.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module3 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module3
new file mode 100644
index 0000000000..4111498efe
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module3
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module6.erl:14: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module5.erl on line 10
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module4 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module4
new file mode 100644
index 0000000000..4e70a8efa1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module4
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module7.erl:8: The call erlang:register(AnAtom::atom(),pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module8.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module5 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module5
new file mode 100644
index 0000000000..f6a10f52fd
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module5
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module9.erl:8: The call erlang:register(AnAtom::atom(),pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module10.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module6 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module6
new file mode 100644
index 0000000000..a8623ee985
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module6
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module12.erl:14: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module11.erl on line 10, the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module11.erl on line 21
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module7 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module7
new file mode 100644
index 0000000000..e39d630c75
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module7
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module14.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module13.erl on line 10, the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module14.erl on line 16
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module8 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module8
new file mode 100644
index 0000000000..58ae498bd4
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_intra_inter_module8
@@ -0,0 +1,2 @@
+
+whereis_intra_inter_module16.erl:11: The call erlang:register(Atom::any(),Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module15.erl on line 10, the erlang:whereis(AnAtom::any()) call in whereis_intra_inter_module16.erl on line 16
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param
new file mode 100644
index 0000000000..fb7563b1c7
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param
@@ -0,0 +1,2 @@
+
+whereis_param.erl:8: The call erlang:register(AnAtom::atom(),pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_param.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param_inter_module b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param_inter_module
new file mode 100644
index 0000000000..fc3e9ca59d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_param_inter_module
@@ -0,0 +1,2 @@
+
+whereis_param_inter_module1.erl:8: The call erlang:register(AnAtom::atom(),pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_param_inter_module2.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function1 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function1
new file mode 100644
index 0000000000..2cf1960d65
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function1
@@ -0,0 +1,2 @@
+
+whereis_rec_function1.erl:14: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_rec_function1.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function2 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function2
new file mode 100644
index 0000000000..4b55bc61ad
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function2
@@ -0,0 +1,2 @@
+
+whereis_rec_function2.erl:13: The call erlang:register(AnAtom::atom(),Id::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_rec_function2.erl on line 15
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function3 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function3
new file mode 100644
index 0000000000..638e9b0f4b
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function3
@@ -0,0 +1,2 @@
+
+whereis_rec_function3.erl:13: The call erlang:register(AnAtom::atom(),Id::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_rec_function3.erl on line 16, the erlang:whereis(NextAtom::atom()) call in whereis_rec_function3.erl on line 20
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function4 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function4
new file mode 100644
index 0000000000..f255cb8170
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function4
@@ -0,0 +1,2 @@
+
+whereis_rec_function4.erl:13: The call erlang:register(AnAtom::atom(),Id::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_rec_function4.erl on line 15
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function5 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function5
new file mode 100644
index 0000000000..78d81b9a57
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function5
@@ -0,0 +1,2 @@
+
+whereis_rec_function5.erl:10: The call erlang:register(AnAtom::any(),Id::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_rec_function5.erl on line 12
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function6 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function6
new file mode 100644
index 0000000000..6df6de1922
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function6
@@ -0,0 +1,2 @@
+
+whereis_rec_function6.erl:10: The call erlang:register(AnAtom::any(),Id::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(NextAtom::any()) call in whereis_rec_function6.erl on line 12
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function7 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function7
new file mode 100644
index 0000000000..f3ddb0b537
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function7
@@ -0,0 +1,2 @@
+
+whereis_rec_function7.erl:15: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_rec_function7.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function8 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function8
new file mode 100644
index 0000000000..9d731ada29
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_rec_function8
@@ -0,0 +1,2 @@
+
+whereis_rec_function8.erl:18: The call erlang:register(AnAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_rec_function8.erl on line 11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_try_catch b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_try_catch
new file mode 100644
index 0000000000..fecb0756bd
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_try_catch
@@ -0,0 +1,3 @@
+
+whereis_try_catch.erl:13: The call erlang:register('master',Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis('master') call in whereis_try_catch.erl on line 8
+whereis_try_catch.erl:21: The call erlang:register('master',Pid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis('master') call in whereis_try_catch.erl on line 18
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars1 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars1
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars10 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars10
new file mode 100644
index 0000000000..36a59096e0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars10
@@ -0,0 +1,2 @@
+
+whereis_vars10.erl:17: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars10.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars11 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars11
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars11
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars12 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars12
new file mode 100644
index 0000000000..d34e1b1c7e
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars12
@@ -0,0 +1,2 @@
+
+whereis_vars12.erl:16: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars12.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars13 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars13
new file mode 100644
index 0000000000..e6ae40cee0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars13
@@ -0,0 +1,2 @@
+
+whereis_vars13.erl:16: The call erlang:register(OtherAtom::'kostis',APid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars13.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars14 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars14
new file mode 100644
index 0000000000..cdd23a7471
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars14
@@ -0,0 +1,2 @@
+
+whereis_vars14.erl:16: The call erlang:register(OtherAtom::'kostis',APid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars14.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars15 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars15
new file mode 100644
index 0000000000..7f79852978
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars15
@@ -0,0 +1,2 @@
+
+whereis_vars15.erl:17: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars15.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars16 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars16
new file mode 100644
index 0000000000..0f28dff25d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars16
@@ -0,0 +1,2 @@
+
+whereis_vars16.erl:17: The call erlang:register(OtherAtom::any(),APid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars16.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars17 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars17
new file mode 100644
index 0000000000..3681c1aa9f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars17
@@ -0,0 +1,2 @@
+
+whereis_vars17.erl:17: The call erlang:register(OtherAtom::any(),APid::any()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars17.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars18 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars18
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars18
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars19 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars19
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars19
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars2 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars2
new file mode 100644
index 0000000000..1636a6e908
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars2
@@ -0,0 +1,2 @@
+
+whereis_vars2.erl:14: The call erlang:register(OtherAtom::atom(),Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars2.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars20 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars20
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars20
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars21 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars21
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars21
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars22 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars22
new file mode 100644
index 0000000000..0f258cc097
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars22
@@ -0,0 +1,2 @@
+
+whereis_vars22.erl:21: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars22.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars3 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars3
new file mode 100644
index 0000000000..4f43b9adca
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars3
@@ -0,0 +1,2 @@
+
+whereis_vars3.erl:14: The call erlang:register(OtherAtom::atom(),APid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars3.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars4 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars4
new file mode 100644
index 0000000000..9eb833c42a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars4
@@ -0,0 +1,2 @@
+
+whereis_vars4.erl:14: The call erlang:register(OtherAtom::atom() | pid(),APid::atom() | pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars4.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars5 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars5
new file mode 100644
index 0000000000..b1c269c020
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars5
@@ -0,0 +1,2 @@
+
+whereis_vars5.erl:16: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars5.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars6 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars6
new file mode 100644
index 0000000000..88c58cfdf2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars6
@@ -0,0 +1,2 @@
+
+whereis_vars6.erl:16: The call erlang:register(OtherAtom::'kostis',APid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars6.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars7 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars7
new file mode 100644
index 0000000000..8924869634
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars7
@@ -0,0 +1,2 @@
+
+whereis_vars7.erl:16: The call erlang:register(OtherAtom::'kostis',APid::atom() | pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars7.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars8 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars8
new file mode 100644
index 0000000000..d9d8f3872f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars8
@@ -0,0 +1,2 @@
+
+whereis_vars8.erl:16: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars8.erl on line 8
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars9 b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars9
new file mode 100644
index 0000000000..da52ca1f82
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/results/whereis_vars9
@@ -0,0 +1,2 @@
+
+whereis_vars9.erl:16: The call erlang:register(OtherAtom::'kostis',Pid::pid()) might fail due to a possible race condition caused by its combination with the erlang:whereis(AnAtom::any()) call in whereis_vars9.erl on line 8
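The results files above record, character for character, the warnings the race suite expects. For reference, a minimal sketch (not part of the committed test data) of reproducing one of them through Dialyzer's Erlang interface; the PLT name "race.plt" and the relative path "src/whereis_vars10.erl" are assumptions, not files from this patch:

-module(run_race_check_sketch).
-export([run/0]).

%% Analyze one test module from source and print any race warnings,
%% roughly in the textual form used by the expected-results files.
run() ->
    Warns = dialyzer:run([{files, ["src/whereis_vars10.erl"]},
                          {from, src_code},
                          {init_plt, "race.plt"},
                          {warnings, [race_conditions]}]),
    [io:format("~s", [dialyzer:format_warning(W)]) || W <- Warns].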
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args1.erl
new file mode 100644
index 0000000000..78b586f097
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args1.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args1).
+-export([start/0]).
+
+start() ->
+ F = fun(T) -> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{counter, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args2.erl
new file mode 100644
index 0000000000..7e53b1e8bf
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args2.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args2).
+-export([start/0]).
+
+start() ->
+ F = fun(T)-> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{counter, N+1}, {maria, N+1}, {kostis, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args3.erl
new file mode 100644
index 0000000000..b99bde14fa
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args3.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args3).
+-export([start/0]).
+
+start() ->
+ F = fun(T)-> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{maria, N+1}, {kostis, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args4.erl
new file mode 100644
index 0000000000..7bf3599c65
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args4.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args4).
+-export([start/0]).
+
+start() ->
+ F = fun(T)-> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, {counter, N+1})
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args5.erl
new file mode 100644
index 0000000000..93fef43cf1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args5.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args5).
+-export([start/0]).
+
+start() ->
+ F = fun(T)-> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, {counter, N+1, N+2})
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0, 0}),
+ io:format("Inserted ~w\n", [{counter, 0, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args6.erl
new file mode 100644
index 0000000000..2a803ccaac
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args6.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args6).
+-export([start/0]).
+
+start() ->
+ F = fun(T)-> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{counter, N+1, N+2}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0, 0}),
+ io:format("Inserted ~w\n", [{counter, 0, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args7.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args7.erl
new file mode 100644
index 0000000000..adc13703a7
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args7.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args7).
+-export([test/0]).
+
+test() ->
+ Foo = foo,
+ ets:new(Foo, [named_table, public]),
+ race(Foo).
+
+race(Tab) ->
+ [{_, N}] = ets:lookup(Tab, counter),
+ aux(Tab, N).
+
+aux(Table, N) ->
+ ets:insert(Table, [{counter, N+1}]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args8.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args8.erl
new file mode 100644
index 0000000000..832fc2eef1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_args8.erl
@@ -0,0 +1,16 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args8).
+-export([test/1]).
+
+test(Foo) ->
+ ets:new(Foo, [named_table, public]),
+ race(Foo).
+
+race(Tab) ->
+ [{_, N}] = ets:lookup(Tab, counter),
+ aux(Tab, N).
+
+aux(Table, N) ->
+ ets:insert(Table, [{counter, N+1}]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow1.erl
new file mode 100644
index 0000000000..7b56495e47
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow1.erl
@@ -0,0 +1,20 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account control flow that might exist.
+
+-module(ets_insert_control_flow1).
+-export([start/0]).
+
+start() ->
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {random, random:uniform(maria:get_int())}),
+ io:format("Inserted ~w\n", [{_, N}] = ets:lookup(foo, random)),
+ case (N rem 2 == 0) of
+ true ->
+ io:format("\nInserted an even number\n", []),
+ io:format("\nWill make it odd\n", []),
+ ets:insert(foo, {random, N+1});
+ false -> ok
+ end,
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, random),
+ io:format("Random odd integer: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow2.erl
new file mode 100644
index 0000000000..434ca113ee
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow2.erl
@@ -0,0 +1,26 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account control flow that might exist.
+
+-module(ets_insert_control_flow2).
+-export([start/0]).
+
+start() ->
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {random, random:uniform(150)}),
+ io:format("Inserted ~w\n", [{_, N}] = ets:lookup(foo, random)),
+ case (N rem 2 == 0) of
+ true ->
+ io:format("\nInserted an even integer\n", []),
+ io:format("\nWill make it odd and generate password\n", []),
+ ets:insert(foo, [{random, N+1}, {pass, generate_password(N)}]);
+ false ->
+ io:format("\nInserted an odd integer\n", []),
+ io:format("\nWill make it even and generate password\n", []),
+ ets:insert(foo, [{random, N+1}, {pass, generate_password(N)}])
+ end,
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, pass),
+ io:format("New password: ~w\n", [ObjectList]).
+
+generate_password(N) ->
+ lists:map(fun (_) -> random:uniform(90)+$\s+1 end, lists:seq(1,N)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow3.erl
new file mode 100644
index 0000000000..9c6a22eb05
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow3.erl
@@ -0,0 +1,31 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account control flow that might exist.
+
+-module(ets_insert_control_flow3).
+-export([start/1]).
+
+start(User) ->
+ Table = ets:new(table, [public]),
+ mod:process(Table),
+ [{_, N}] =
+ case User of
+ root -> ets:lookup(Table, root);
+ user -> ets:lookup(Table, user);
+ Other -> [{undefined, -1}]
+ end,
+ case N of
+ -1 -> io:format("\nUnknown User\n", []);
+ 0 ->
+ case User of
+ root ->
+ ets:insert(Table, {User, Pass = generate_password(N) ++ generate_password(N+1)});
+ user ->
+ ets:insert(Table, {User, Pass = generate_password(N)})
+ end,
+ io:format("\nYour new pass is ~w\n", [Pass]);
+ P ->
+ io:format("\nYour pass is ~w\n", [P])
+ end.
+
+generate_password(N) ->
+ lists:map(fun (_) -> random:uniform(90)+$\s+1 end, lists:seq(1,N)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow4.erl
new file mode 100644
index 0000000000..caa3804614
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow4.erl
@@ -0,0 +1,31 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account control flow that might exist.
+
+-module(ets_insert_control_flow4).
+-export([start/1]).
+
+start(User) ->
+ Table = ets:new(table, [public]),
+ mod:process(Table),
+ [{_, N}] =
+ case User of
+ root -> ets:lookup(Table, pass);
+ user -> ets:lookup(Table, pass);
+ _Other -> [{undefined, -1}]
+ end,
+ case N of
+ -1 -> io:format("\nUnknown User\n", []);
+ 0 ->
+ case User of
+ root ->
+ ets:insert(Table, {pass, Pass = generate_password(N) ++ generate_password(N+1)});
+ user ->
+ ets:insert(Table, {pass, Pass = generate_password(N)})
+ end,
+ io:format("\nYour new pass is ~w\n", [Pass]);
+ P ->
+ io:format("\nYour pass is ~w\n", [P])
+ end.
+
+generate_password(N) ->
+ lists:map(fun (_) -> random:uniform(90)+$\s+1 end, lists:seq(1,N)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow5.erl
new file mode 100644
index 0000000000..b19fd776ec
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_control_flow5.erl
@@ -0,0 +1,34 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account control flow that might exist.
+
+-module(ets_insert_control_flow5).
+-export([start/1]).
+
+start(User) ->
+ Table = ets:new(table, [public]),
+ mod:process(Table),
+ [{_, N}] =
+ case User of
+ root -> ets:lookup(Table, pass);
+ user -> ets:lookup(Table, pass);
+ Other -> [{undefined, -1}]
+ end,
+ [{_, Msg}] = ets:lookup(Table, welcome_msg),
+ case N of
+ -1 -> io:format("\nUnknown User\n", []);
+ 0 ->
+ case User of
+ root ->
+ ets:insert(Table, {welcome_msg, Msg ++ "root"}),
+ ets:insert(Table, {pass, Pass = generate_password(N) ++ generate_password(N+1)});
+ user ->
+ ets:insert(Table, {welcome_msg, Msg ++ "user"}),
+ ets:insert(Table, {pass, Pass = generate_password(N)})
+ end,
+ io:format("\nYour new pass is ~w\n", [Pass]);
+ P ->
+ io:format("\nYour pass is ~w\n", [P])
+ end.
+
+generate_password(N) ->
+ lists:map(fun (_) -> random:uniform(90)+$\s+1 end, lists:seq(1,N)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race1.erl
new file mode 100644
index 0000000000..57022c86d4
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race1.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between ets:lookup/
+%% ets:insert is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(ets_insert_diff_atoms_race1).
+-export([test/0]).
+
+test() ->
+ ets:new(foo, [named_table, public]),
+ {race(foo), no_race(foo)}.
+
+race(Tab) ->
+ [{_, N}] = ets:lookup(Tab, counter),
+ aux(Tab, N).
+
+no_race(Tab) ->
+ [{_, N}] = ets:lookup(Tab, counter),
+ AnotherTab = bar,
+ aux(AnotherTab, N).
+
+aux(Table, N) ->
+ ets:insert(Table, [{counter, N+1}]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race2.erl
new file mode 100644
index 0000000000..233a19087e
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race2.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between ets:lookup/
+%% ets:insert is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(ets_insert_diff_atoms_race2).
+-export([test/0]).
+
+test() ->
+ ets:new(foo, [named_table, public]),
+ {race(foo, counter), no_race(foo, counter)}.
+
+race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ aux(Tab, Counter, N).
+
+no_race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ AnotherTab = bar,
+ aux(AnotherTab, Counter, N).
+
+aux(Table, Counter, N) ->
+ ets:insert(Table, [{Counter, N+1}]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race3.erl
new file mode 100644
index 0000000000..a09e4644f8
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race3.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between ets:lookup/
+%% ets:insert is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(ets_insert_diff_atoms_race3).
+-export([test/0]).
+
+test() ->
+ ets:new(foo, [named_table, public]),
+ {race(foo), no_race(foo)}.
+
+race(Tab) ->
+ [{_, N}] = ets:lookup(Tab, counter),
+ aux(Tab, N).
+
+no_race(Tab) ->
+ [{_, N}] = ets:lookup(Tab, counter),
+ AnotherTab = bar,
+ aux(AnotherTab, N).
+
+aux(Table, N) ->
+ ets:insert(Table, {counter, N+1}).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race4.erl
new file mode 100644
index 0000000000..d0a3f0a1d1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race4.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between ets:lookup/
+%% ets:insert is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(ets_insert_diff_atoms_race4).
+-export([test/0]).
+
+test() ->
+ ets:new(foo, [named_table, public]),
+ {race(foo, counter), no_race(foo, counter)}.
+
+race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ aux(Tab, Counter, N).
+
+no_race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ AnotherTab = bar,
+ aux(AnotherTab, Counter, N).
+
+aux(Table, Counter, N) ->
+ ets:insert(Table, {Counter, N+1}).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race5.erl
new file mode 100644
index 0000000000..bbccaab94d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race5.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between ets:lookup/
+%% ets:insert is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(ets_insert_diff_atoms_race5).
+-export([test/0]).
+
+test() ->
+ ets:new(foo, [named_table, public]),
+ {race(foo, counter), no_race(foo, counter)}.
+
+race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ aux(Tab, Counter, N).
+
+no_race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ AnotherCounter = index,
+ aux(Tab, AnotherCounter, N).
+
+aux(Table, Counter, N) ->
+ ets:insert(Table, [{Counter, N+1}]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race6.erl
new file mode 100644
index 0000000000..17457e2b44
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_diff_atoms_race6.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between ets:lookup/
+%% ets:insert is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(ets_insert_diff_atoms_race6).
+-export([test/0]).
+
+test() ->
+ ets:new(foo, [named_table, public]),
+ {race(foo, counter), no_race(foo, counter)}.
+
+race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ aux(Tab, Counter, N).
+
+no_race(Tab, Counter) ->
+ [{_, N}] = ets:lookup(Tab, Counter),
+ AnotherCounter = index,
+ aux(Tab, AnotherCounter, N).
+
+aux(Table, Counter, N) ->
+ ets:insert(Table, {Counter, N+1}).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double1.erl
new file mode 100644
index 0000000000..92fa945b73
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double1.erl
@@ -0,0 +1,28 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account multiple ets:inserts that might exist.
+
+-module(ets_insert_double1).
+-export([start/0]).
+
+start() ->
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {random, random:uniform(150)}),
+ io:format("Inserted ~w\n", [{_, N}] = ets:lookup(foo, random)),
+ case (N rem 2 == 0) of
+ true ->
+ io:format("\nInserted an even integer\n", []),
+ io:format("\nWill make it odd and generate new password\n", []),
+ ets:insert(foo, [{random, N+1}, {pass, generate_password(N)}]);
+ false ->
+ io:format("\nInserted an odd integer\n", []),
+ io:format("\nWill make it even and generate new password\n", []),
+ ets:insert(foo, [{random, N+1}, {pass, generate_password(N)}])
+ end,
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, pass),
+ io:format("New password: ~w\n", [ObjectList]),
+ ets:insert(foo, {pass, 'empty'}).
+
+generate_password(N) ->
+ [{_, P}] = ets:lookup(foo, pass),
+ lists:map(fun (_) -> random:uniform(90)+P+$\s+1 end, lists:seq(1,N)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double2.erl
new file mode 100644
index 0000000000..dc2b14ada0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_double2.erl
@@ -0,0 +1,28 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account multiple ets:inserts that might exist.
+
+-module(ets_insert_double2).
+-export([start/2]).
+
+start(Random, Pass) ->
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {Random, random:uniform(150)}),
+ io:format("Inserted ~w\n", [{_, N}] = ets:lookup(foo, Random)),
+ case (N rem 2 == 0) of
+ true ->
+ io:format("\nInserted an even integer\n", []),
+ io:format("\nWill make it odd and generate new password\n", []),
+ ets:insert(foo, [{Random, N+1}, {Pass, generate_password(Pass, N)}]);
+ false ->
+ io:format("\nInserted an odd integer\n", []),
+ io:format("\nWill make it even and generate new password\n", []),
+ ets:insert(foo, [{Random, N+1}, {Pass, generate_password(Pass, N)}])
+ end,
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, Pass),
+ io:format("New password: ~w\n", [ObjectList]),
+ ets:insert(foo, {Pass, 'empty'}).
+
+generate_password(Pass, N) ->
+ [{_, P}] = ets:lookup(foo, Pass),
+ lists:map(fun (_) -> random:uniform(90)+P+$\s+1 end, lists:seq(1,N)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs1.erl
new file mode 100644
index 0000000000..4a0a012fe3
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs1.erl
@@ -0,0 +1,18 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the anonymous functions.
+
+-module(ets_insert_funs1).
+-export([start/0]).
+
+start() ->
+ F = fun(T) ->
+ ets:lookup(T, counter)
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ [{_, N}] = F(foo),
+ ets:insert(foo, [{counter, N+1}]),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs2.erl
new file mode 100644
index 0000000000..3abb9f2fca
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_funs2.erl
@@ -0,0 +1,18 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the anonymous functions.
+
+-module(ets_insert_funs2).
+-export([start/0]).
+
+start() ->
+ F = fun(T, N) ->
+ ets:insert(T, [{counter, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ [{_, N}] = ets:lookup(foo, counter),
+ F(foo, N),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_new.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_new.erl
new file mode 100644
index 0000000000..63f3272912
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_new.erl
@@ -0,0 +1,15 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account multiple ets:new calls that might exist.
+
+-module(ets_insert_new).
+-export([test/0]).
+
+test() ->
+ T1 = ets:new(foo, [public]),
+ T2 = ets:new(bar, []),
+ ets:lookup(T2, counter),
+ aux(T1),
+ aux(T2).
+
+aux(Tab) ->
+ ets:insert(Tab, {counter, 1}).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_param.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_param.erl
new file mode 100644
index 0000000000..a479a31792
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/ets_insert_param.erl
@@ -0,0 +1,26 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination in higher order functions.
+
+-module(ets_insert_param).
+-export([start/1]).
+
+start(User) ->
+ Table = ets:new(table, [public]),
+ mod:process(Table),
+ [{_, Msg}] = ets:lookup(Table, welcome_msg),
+ case User of
+ root ->
+ ets:insert(Table, {welcome_msg, Msg ++ "root"}),
+ ets:insert(Table, {pass, Pass = generate_password(ets:lookup(Table, pass))
+ ++ generate_strong_password(ets:lookup(Table, pass))});
+ user ->
+ ets:insert(Table, {welcome_msg, Msg ++ "user"}),
+ ets:insert(Table, {pass, Pass = generate_password(ets:lookup(Table, pass))})
+ end,
+ io:format("\nYour new pass is ~w\n", [Pass]).
+
+generate_password([{_, N}]) ->
+ lists:map(fun (_) -> random:uniform(90)+$\s+1 end, lists:seq(1,N)).
+
+generate_strong_password([{_, N}]) ->
+ lists:map(fun (_) -> random:uniform(90)+$\s+1 end, lists:seq(1,(N rem 2) * 5)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/extract_translations.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/extract_translations.erl
new file mode 100644
index 0000000000..4bf6f1b198
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/extract_translations.erl
@@ -0,0 +1,294 @@
+%%%----------------------------------------------------------------------
+%%% File : extract_translations.erl
+%%% Author : Sergei Golovan <[email protected]>
+%%% Purpose : Auxiliary tool for interface/messages translators
+%%% Created : 23 Apr 2005 by Sergei Golovan <[email protected]>
+%%% Id : $Id: extract_translations.erl,v 1.1 2009/08/17 09:18:59 maria Exp $
+%%%----------------------------------------------------------------------
+
+-module(extract_translations).
+-author('[email protected]').
+
+-export([start/0]).
+
+-define(STATUS_SUCCESS, 0).
+-define(STATUS_ERROR, 1).
+-define(STATUS_USAGE, 2).
+
+-include_lib("kernel/include/file.hrl").
+
+
+start() ->
+ ets:new(translations, [named_table, public]),
+ ets:new(translations_obsolete, [named_table, public]),
+ ets:new(files, [named_table, public]),
+ ets:new(vars, [named_table, public]),
+ case init:get_plain_arguments() of
+ ["-srcmsg2po", Dir, File] ->
+ print_po_header(File),
+ Status = process(Dir, File, srcmsg2po),
+ halt(Status);
+ ["-unused", Dir, File] ->
+ Status = process(Dir, File, unused),
+ halt(Status);
+ [Dir, File] ->
+ Status = process(Dir, File, used),
+ halt(Status);
+ _ ->
+ print_usage(),
+ halt(?STATUS_USAGE)
+ end.
+
+
+process(Dir, File, Used) ->
+ case load_file(File) of
+ {error, Reason} ->
+ io:format("~s: ~s~n", [File, file:format_error(Reason)]),
+ ?STATUS_ERROR;
+ _ ->
+ FileList = find_src_files(Dir),
+ lists:foreach(
+ fun(F) ->
+ parse_file(Dir, F, Used)
+ end, FileList),
+ case Used of
+ unused ->
+ ets:foldl(fun({Key, _}, _) ->
+ io:format("~p~n", [Key])
+ end, ok, translations);
+ srcmsg2po ->
+ ets:foldl(fun({Key, Trans}, _) ->
+ print_translation_obsolete(Key, Trans)
+ end, ok, translations_obsolete);
+ _ ->
+ ok
+ end,
+ ?STATUS_SUCCESS
+ end.
+
+parse_file(Dir, File, Used) ->
+ ets:delete_all_objects(vars),
+ case epp:parse_file(File, [Dir, filename:dirname(File) | code:get_path()], []) of
+ {ok, Forms} ->
+ lists:foreach(
+ fun(F) ->
+ parse_form(Dir, File, F, Used)
+ end, Forms);
+ _ ->
+ ok
+ end.
+
+parse_form(Dir, File, Form, Used) ->
+ case Form of
+ %%{undefined, Something} ->
+ %% io:format("Undefined: ~p~n", [Something]);
+ {call,
+ _,
+ {remote, _, {atom, _, translate}, {atom, _, translate}},
+ [_, {string, Line, Str}]
+ } ->
+ process_string(Dir, File, Line, Str, Used);
+ {call,
+ _,
+ {remote, _, {atom, _, translate}, {atom, _, translate}},
+ [_, {var, _, Name}]
+ } ->
+ case ets:lookup(vars, Name) of
+ [{_Name, Value, Line}] ->
+ process_string(Dir, File, Line, Value, Used);
+ _ ->
+ ok
+ end;
+ {match,
+ _,
+ {var, _, Name},
+ {string, Line, Value}
+ } ->
+ ets:insert(vars, {Name, Value, Line});
+ L when is_list(L) ->
+ lists:foreach(
+ fun(F) ->
+ parse_form(Dir, File, F, Used)
+ end, L);
+ T when is_tuple(T) ->
+ lists:foreach(
+ fun(F) ->
+ parse_form(Dir, File, F, Used)
+ end, tuple_to_list(T));
+ _ ->
+ ok
+ end.
+
+process_string(_Dir, _File, _Line, "", _Used) ->
+ ok;
+
+process_string(_Dir, File, Line, Str, Used) ->
+ case {ets:lookup(translations, Str), Used} of
+ {[{_Key, _Trans}], unused} ->
+ ets:delete(translations, Str);
+ {[{_Key, _Trans}], used} ->
+ ok;
+ {[{_Key, Trans}], srcmsg2po} ->
+ ets:delete(translations_obsolete, Str),
+ print_translation(File, Line, Str, Trans);
+ {_, used} ->
+ case ets:lookup(files, File) of
+ [{_}] ->
+ ok;
+ _ ->
+ io:format("~n% ~s~n", [File]),
+ ets:insert(files, {File})
+ end,
+ case Str of
+ [] -> ok;
+ _ -> io:format("{~p, \"\"}.~n", [Str])
+ end,
+ ets:insert(translations, {Str, ""});
+ {_, srcmsg2po} ->
+ case ets:lookup(files, File) of
+ [{_}] ->
+ ok;
+ _ ->
+ ets:insert(files, {File})
+ end,
+ ets:insert(translations, {Str, ""}),
+ print_translation(File, Line, Str, "");
+ _ ->
+ ok
+ end.
+
+load_file(File) ->
+ case file:consult(File) of
+ {ok, Terms} ->
+ lists:foreach(
+ fun({Orig, Trans}) ->
+ case Trans of
+ "" ->
+ ok;
+ _ ->
+ ets:insert(translations, {Orig, Trans}),
+ ets:insert(translations_obsolete, {Orig, Trans})
+ end
+ end, Terms);
+ Err ->
+ Err
+ end.
+
+find_src_files(Dir) ->
+ case file:list_dir(Dir) of
+ {ok, FileList} ->
+ recurse_filelist(
+ lists:map(
+ fun(F) ->
+ filename:join(Dir, F)
+ end, FileList));
+ _ ->
+ []
+ end.
+
+recurse_filelist(FileList) ->
+ recurse_filelist(FileList, []).
+
+recurse_filelist([], Acc) ->
+ lists:reverse(Acc);
+
+recurse_filelist([H | T], Acc) ->
+ case file:read_file_info(H) of
+ {ok, #file_info{type = directory}} ->
+ recurse_filelist(T, lists:reverse(find_src_files(H)) ++ Acc);
+ {ok, #file_info{type = regular}} ->
+ case string:substr(H, string:len(H) - 3) of
+ ".erl" ->
+ recurse_filelist(T, [H | Acc]);
+ ".hrl" ->
+ recurse_filelist(T, [H | Acc]);
+ _ ->
+ recurse_filelist(T, Acc)
+ end;
+ _ ->
+ recurse_filelist(T, Acc)
+ end.
+
+
+print_usage() ->
+ io:format(
+ "Usage: extract_translations [-unused] dir file~n"
+ "~n"
+ "Example:~n"
+ " extract_translations . ./msgs/ru.msg~n"
+ ).
+
+
+%%%
+%%% Gettext
+%%%
+
+print_po_header(File) ->
+ MsgProps = get_msg_header_props(File),
+ {Language, [LastT | AddT]} = prepare_props(MsgProps),
+ application:load(ejabberd),
+ {ok, Version} = application:get_key(ejabberd, vsn),
+ print_po_header(Version, Language, LastT, AddT).
+
+get_msg_header_props(File) ->
+ {ok, F} = file:open(File, [read]),
+ Lines = get_msg_header_props(F, []),
+ file:close(F),
+ Lines.
+
+get_msg_header_props(F, Lines) ->
+ String = io:get_line(F, ""),
+ case io_lib:fread("% ", String) of
+ {ok, [], RemString} ->
+ case io_lib:fread("~s", RemString) of
+ {ok, [Key], Value} when Value /= "\n" ->
+ %% The first character in Value is a blankspace:
+ %% And the last characters are 'slash n'
+ ValueClean = string:substr(Value, 2, string:len(Value)-2),
+ get_msg_header_props(F, Lines ++ [{Key, ValueClean}]);
+ _ ->
+ get_msg_header_props(F, Lines)
+ end;
+ _ ->
+ Lines
+ end.
+
+prepare_props(MsgProps) ->
+ Language = proplists:get_value("Language:", MsgProps),
+ Authors = proplists:get_all_values("Author:", MsgProps),
+ {Language, Authors}.
+
+print_po_header(Version, Language, LastTranslator, AdditionalTranslatorsList) ->
+ AdditionalTranslatorsString = build_additional_translators(AdditionalTranslatorsList),
+ HeaderString =
+ "msgid \"\"\n"
+ "msgstr \"\"\n"
+ "\"Project-Id-Version: " ++ Version ++ "\\n\"\n"
+ ++ "\"X-Language: " ++ Language ++ "\\n\"\n"
+ "\"Last-Translator: " ++ LastTranslator ++ "\\n\"\n"
+ ++ AdditionalTranslatorsString ++
+ "\"MIME-Version: 1.0\\n\"\n"
+ "\"Content-Type: text/plain; charset=UTF-8\\n\"\n"
+ "\"Content-Transfer-Encoding: 8bit\\n\"\n",
+ io:format("~s~n", [HeaderString]).
+
+build_additional_translators(List) ->
+ lists:foldl(
+ fun(T, Str) ->
+ Str ++ "\"X-Additional-Translator: " ++ T ++ "\\n\"\n"
+ end,
+ "",
+ List).
+
+print_translation(File, Line, Str, StrT) ->
+ {ok, StrQ, _} = regexp:gsub(Str, "\"", "\\\""),
+ {ok, StrTQ, _} = regexp:gsub(StrT, "\"", "\\\""),
+ io:format("#: ~s:~p~nmsgid \"~s\"~nmsgstr \"~s\"~n~n", [File, Line, StrQ, StrTQ]).
+
+print_translation_obsolete(Str, StrT) ->
+ File = "unknown.erl",
+ Line = 1,
+ {ok, StrQ, _} = regexp:gsub(Str, "\"", "\\\""),
+ {ok, StrTQ, _} = regexp:gsub(StrT, "\"", "\\\""),
+ io:format("#: ~s:~p~n#~~ msgid \"~s\"~n#~~ msgstr \"~s\"~n~n", [File, Line, StrQ, StrTQ]).
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race1.erl
new file mode 100644
index 0000000000..74d17aab0c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race1.erl
@@ -0,0 +1,33 @@
+%% This tests that the race condition detection between mnesia:dirty_read/
+%% mnesia:dirty_write is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(mnesia_diff_atoms_race1).
+-export([test/2]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+test(Eno, Raise) ->
+ {race(employee, Eno, Raise), no_race(employee, Eno, Raise)}.
+
+race(Tab, Eno, Raise) ->
+ [E] = mnesia:dirty_read(Tab, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ aux(Tab, New).
+
+no_race(Tab, Eno, Raise) ->
+ [E] = mnesia:dirty_read(Tab, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ AnotherTab = employer,
+ aux(AnotherTab, New).
+
+
+aux(Table, Record) ->
+ mnesia:dirty_write(Table, Record).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race2.erl
new file mode 100644
index 0000000000..e92405a673
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_diff_atoms_race2.erl
@@ -0,0 +1,37 @@
+%% This tests that the race condition detection between mnesia:dirty_read/
+%% mnesia:dirty_write is robust even when the functions are called with
+%% different atoms as arguments.
+
+-module(mnesia_diff_atoms_race2).
+-export([test/2]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+-record(employer, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+test(Eno, Raise) ->
+ {race(employee, Eno, Raise), no_race(employee, Eno, Raise)}.
+
+race(Tab, Eno, Raise) ->
+ [E] = mnesia:dirty_read(Tab, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ aux(New).
+
+no_race(Tab, Eno, Raise) ->
+ [E] = mnesia:dirty_read(Tab, Eno),
+ AnotherRecord = #employer{},
+ aux(AnotherRecord).
+
+aux(Record) ->
+ mnesia:dirty_write(Record).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_one_write_two.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_one_write_two.erl
new file mode 100644
index 0000000000..81e460be45
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_one_write_two.erl
@@ -0,0 +1,22 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account the argument types
+%% of the calls.
+
+-module(mnesia_dirty_read_one_write_two).
+-export([raise/2]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise) ->
+ [E] = mnesia:dirty_read({employee, Eno}),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(employee, New).
+
+
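The mnesia tests above follow the same shape as the ets ones: a dirty_read followed by a dirty_write on the same table leaves a window in which another process can change the record. A minimal sketch (not part of the test data) of the transactional version of the raise, with the record definition repeated so it compiles on its own; the module name is made up:

-module(salary_tx_sketch).
-export([raise/2]).

-record(employee, {emp_no, name, salary, sex, phone, room_no}).

%% The read and the write happen inside one transaction, with a write
%% lock taken on the row, so there is no read/write window.
raise(Eno, Raise) ->
    F = fun() ->
            [E] = mnesia:read(employee, Eno, write),
            mnesia:write(E#employee{salary = E#employee.salary + Raise})
        end,
    mnesia:transaction(F).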
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_two_write_one.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_two_write_one.erl
new file mode 100644
index 0000000000..515e9f11de
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_two_write_one.erl
@@ -0,0 +1,22 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account the argument types
+%% of the calls.
+
+-module(mnesia_dirty_read_two_write_one).
+-export([raise/2]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise) ->
+ [E] = mnesia:dirty_read(employee, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(New).
+
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double1.erl
new file mode 100644
index 0000000000..2bd18e4772
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double1.erl
@@ -0,0 +1,25 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account multiple
+%% mnesia:dirty_writes that might exist.
+
+-module(mnesia_dirty_read_write_double1).
+-export([raise/3]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise, Room) ->
+ [E] = mnesia:dirty_read(employee, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(employee, New),
+ move(E, Room).
+
+move(E, Room) ->
+ New = E#employee{room_no = Room},
+ mnesia:dirty_write(employee, New).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double2.erl
new file mode 100644
index 0000000000..cdbfdc700a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double2.erl
@@ -0,0 +1,25 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account multiple
+%% mnesia:dirty_writes that might exist.
+
+-module(mnesia_dirty_read_write_double2).
+-export([raise/3]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise, Room) ->
+ [E] = mnesia:dirty_read({employee, Eno}),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(New),
+ move(E, Room).
+
+move(E, Room) ->
+ New = E#employee{room_no = Room},
+ mnesia:dirty_write(New).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double3.erl
new file mode 100644
index 0000000000..051524917e
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double3.erl
@@ -0,0 +1,25 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account multiple
+%% mnesia:dirty_writes that might exist.
+
+-module(mnesia_dirty_read_write_double3).
+-export([raise/3]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise, Room) ->
+ [E] = mnesia:dirty_read({employee, Eno}),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(employee, New),
+ move(E, Room).
+
+move(E, Room) ->
+ New = E#employee{room_no = Room},
+ mnesia:dirty_write(employee, New).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double4.erl
new file mode 100644
index 0000000000..96752a6045
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_double4.erl
@@ -0,0 +1,25 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account multiple
+%% mnesia:dirty_writes that might exist.
+
+-module(mnesia_dirty_read_write_double4).
+-export([raise/3]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise, Room) ->
+ [E] = mnesia:dirty_read(employee, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(New),
+ move(E, Room).
+
+move(E, Room) ->
+ New = E#employee{room_no = Room},
+ mnesia:dirty_write(New).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_one.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_one.erl
new file mode 100644
index 0000000000..7ff546a9ea
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_one.erl
@@ -0,0 +1,22 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account the argument types
+%% of the calls.
+
+-module(mnesia_dirty_read_write_one).
+-export([raise/2]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise) ->
+ [E] = mnesia:dirty_read({employee, Eno}),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(New).
+
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_two.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_two.erl
new file mode 100644
index 0000000000..10952ac86d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/mnesia_dirty_read_write_two.erl
@@ -0,0 +1,22 @@
+%% This tests the presence of possible races due to an mnesia:dirty_read/
+%% mnesia:dirty_write combination. It takes into account the argument types
+%% of the calls.
+
+-module(mnesia_dirty_read_write_two).
+-export([raise/2]).
+
+-record(employee, {emp_no,
+ name,
+ salary,
+ sex,
+ phone,
+ room_no}).
+
+
+raise(Eno, Raise) ->
+ [E] = mnesia:dirty_read(employee, Eno),
+ Salary = E#employee.salary + Raise,
+ New = E#employee{salary = Salary},
+ mnesia:dirty_write(employee, New).
+
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow1.erl
new file mode 100644
index 0000000000..e65f6c3e23
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow1.erl
@@ -0,0 +1,17 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination. It takes into account control flow that might exist.
+
+-module(whereis_control_flow1).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false -> register(AnAtom, Pid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
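whereis_control_flow1 above is the canonical whereis/register pattern: the name is free when whereis/1 runs, but another process may grab it before register/2 executes, so the register call can raise badarg at runtime. A minimal sketch (not part of the test data) of handling that failure instead of assuming the name is still free; the module and function names are made up:

-module(register_sketch).
-export([safe_register/2]).

%% register/2 raises badarg if the name was taken (or the pid died)
%% between the caller's whereis/1 check and this call.
safe_register(Name, Pid) ->
    try register(Name, Pid) of
        true -> ok
    catch
        error:badarg -> already_registered
    end.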
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow2.erl
new file mode 100644
index 0000000000..41039482c9
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow2.erl
@@ -0,0 +1,19 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination. It takes into account control flow that might exist.
+
+-module(whereis_control_flow2).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true ->
+ io:format("self",[]),
+ register(AnAtom, Pid);
+ false -> register(AnAtom, Pid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow3.erl
new file mode 100644
index 0000000000..87b2976165
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow3.erl
@@ -0,0 +1,25 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination. It takes into account control flow that might exist.
+
+-module(whereis_control_flow3).
+-export([start/3]).
+
+start(AnAtom, Fun, FunName) ->
+ Pid =
+ case FunName of
+ master ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end;
+ slave ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end
+ end,
+ register(AnAtom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow4.erl
new file mode 100644
index 0000000000..9292006fa8
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow4.erl
@@ -0,0 +1,29 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination. It takes into account control flow that might exist.
+
+-module(whereis_control_flow4).
+-export([start/1]).
+
+start(Fun) ->
+ case whereis(maria) of
+ undefined ->
+ Pid1 = spawn(Fun),
+ case Pid1 =:= self() of
+ true ->
+ case whereis(kostis) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ case Pid2 =:= self() of
+ true ->
+ register(maria, Pid1),
+ register(kostis, Pid2);
+ false -> ok
+ end;
+ P when is_pid(P) ->
+ ok
+ end;
+ false -> ok
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow5.erl
new file mode 100644
index 0000000000..8de9cb2dad
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow5.erl
@@ -0,0 +1,12 @@
+%% This tests the presence of possible races due to a whereis/unregister
+%% combination. It takes into account control flow that might exist.
+
+-module(whereis_control_flow5).
+-export([start/1]).
+
+start(AnAtom) ->
+ case whereis(AnAtom) of
+ undefined -> ok;
+ P when is_pid(P) ->
+ unregister(AnAtom)
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow6.erl
new file mode 100644
index 0000000000..03c5095a50
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_control_flow6.erl
@@ -0,0 +1,12 @@
+%% This tests the presence of possible races due to a whereis/unregister
+%% combination. It takes into account control flow that might exist.
+
+-module(whereis_control_flow6).
+-export([start/0]).
+
+start() ->
+ case whereis(kostis) of
+ undefined -> ok;
+ P when is_pid(P) ->
+ unregister(kostis)
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_no_race.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_no_race.erl
new file mode 100644
index 0000000000..dcadcb3683
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_no_race.erl
@@ -0,0 +1,24 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust even when the functions are called with different atoms
+%% as arguments.
+
+-module(whereis_diff_atoms_no_race).
+-export([test/0]).
+
+test() ->
+ Fun = fun () -> foo end,
+ {no_race(maria, Fun)}.
+
+no_race(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ AnotherAtom = kostis,
+ aux(AnotherAtom, Pid);
+ P when is_pid(P) ->
+ ok
+ end.
+
+aux(Atom, Pid) ->
+ register(Atom, Pid).
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_race.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_race.erl
new file mode 100644
index 0000000000..7e302247f8
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_atoms_race.erl
@@ -0,0 +1,35 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust even when the functions are called with different atoms
+%% as arguments.
+
+-module(whereis_diff_atoms_race).
+-export([test/0]). %, race/1, no_race/1]).
+
+test() ->
+ Fun = fun () -> foo end,
+ {race(maria, Fun), no_race(maria, Fun)}.
+
+race(AnAtom, Fun) ->
+ %AnAtom = maria,
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ aux(AnAtom, Pid);
+ P when is_pid(P) ->
+ ok
+ end.
+
+no_race(AnAtom, Fun) ->
+ %AnAtom = maria,
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ AnotherAtom = kostis,
+ aux(AnotherAtom, Pid);
+ P when is_pid(P) ->
+ ok
+ end.
+
+aux(Atom, Pid) ->
+ register(Atom, Pid).
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1.erl
new file mode 100644
index 0000000000..6a1c197c06
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions1).
+-export([start/2]).
+
+continue(Fun) ->
+ case whereis(master) of
+ undefined ->
+ register(master, spawn(Fun));
+ _ -> ok
+ end.
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ register(AnAtom, Pid);
+ _ ->
+ ok
+ end,
+ continue(Fun).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_nested.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_nested.erl
new file mode 100644
index 0000000000..0a77c78ba3
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_nested.erl
@@ -0,0 +1,23 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions1_nested).
+-export([test/2]).
+
+test(AnAtom, Fun) ->
+ start(AnAtom, Fun).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ race1(AnAtom, Pid);
+ P when is_pid(P) ->
+ true
+ end.
+
+race1(Atom, Pid) ->
+ race2(Atom, Pid).
+
+race2(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_pathsens.erl
new file mode 100644
index 0000000000..53955a7fa1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_pathsens.erl
@@ -0,0 +1,32 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_functions1_pathsens).
+-export([test/1]).
+
+test(FunName) ->
+ start(kostis, mod:function(), FunName).
+
+start(AnAtom, Fun, FunName) ->
+ Pid =
+ case FunName of
+ master ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end;
+ slave ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end
+ end,
+ race(AnAtom, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_twice.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_twice.erl
new file mode 100644
index 0000000000..2e87caff4f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions1_twice.erl
@@ -0,0 +1,30 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having multiple calls in separate functions.
+
+-module(whereis_diff_functions1_twice).
+-export([test/2]).
+
+test(AnAtom, Fun) ->
+ start(AnAtom, Fun).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid1 = spawn(Fun),
+ race(AnAtom, Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race_again(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end;
+ P when is_pid(P) ->
+ true
+ end.
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
+
+race_again(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2.erl
new file mode 100644
index 0000000000..1ec8d194be
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2.erl
@@ -0,0 +1,25 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions2).
+-export([test/0]).
+
+test() ->
+ start(kostis, mod:function()).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_nested.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_nested.erl
new file mode 100644
index 0000000000..415f73d555
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_nested.erl
@@ -0,0 +1,20 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions2_nested).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ race1(AnAtom, Pid);
+ P when is_pid(P) ->
+ true
+ end.
+
+race1(Atom, Pid) ->
+ race2(Atom, Pid).
+
+race2(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_pathsens.erl
new file mode 100644
index 0000000000..cbd9a7d016
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_pathsens.erl
@@ -0,0 +1,29 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_functions2_pathsens).
+-export([race/4]).
+
+start(AnAtom, Fun, FunName) ->
+ Pid =
+ case FunName of
+ master ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end;
+ slave ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end
+ end.
+
+race(Atom, Fun, FunName, Pid) ->
+ start(Atom, Fun, FunName),
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_twice.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_twice.erl
new file mode 100644
index 0000000000..d8e4987758
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions2_twice.erl
@@ -0,0 +1,27 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having multiple calls in separate functions.
+
+-module(whereis_diff_functions2_twice).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid1 = spawn(Fun),
+ race(AnAtom, Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race_again(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end;
+ P when is_pid(P) ->
+ true
+ end.
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
+
+race_again(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3.erl
new file mode 100644
index 0000000000..7d4e0905ef
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions3).
+-export([start/1]).
+
+start(AnAtom) ->
+ register(AnAtom, race(AnAtom)).
+
+race(Atom) ->
+ whereis(Atom).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_nested.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_nested.erl
new file mode 100644
index 0000000000..b4129dc83b
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_nested.erl
@@ -0,0 +1,21 @@
+%% This tests that the race condition detection between whereis/unregister
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions3_nested).
+-export([test/1]).
+
+test(AnAtom) ->
+ start(AnAtom).
+
+start(AnAtom) ->
+ case whereis(AnAtom) of
+ undefined -> true;
+ P when is_pid(P) ->
+ race1(AnAtom)
+ end.
+
+race1(Atom) ->
+ race2(Atom).
+
+race2(Atom) ->
+ unregister(Atom).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_pathsens.erl
new file mode 100644
index 0000000000..f06e43024b
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions3_pathsens.erl
@@ -0,0 +1,29 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_functions3_pathsens).
+-export([start/3]).
+
+start(AnAtom, Fun, FunName) ->
+ Pid =
+ case FunName of
+ master ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end;
+ slave ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end
+ end,
+ race(AnAtom, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions4.erl
new file mode 100644
index 0000000000..334485921c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions4.erl
@@ -0,0 +1,32 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions4).
+-export([test/2]).
+
+test(AnAtom, Fun) ->
+ start(AnAtom, Fun).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race(AnAtom, Pid2),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid3 = spawn(Fun),
+ race(AnAtom, Pid3);
+ P when is_pid(P) ->
+ true
+ end;
+ P when is_pid(P) ->
+ true
+ end.
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions5.erl
new file mode 100644
index 0000000000..b4459273f9
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions5.erl
@@ -0,0 +1,22 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions5).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions6.erl
new file mode 100644
index 0000000000..ccf0f5e127
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_functions6.erl
@@ -0,0 +1,29 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions.
+
+-module(whereis_diff_functions6).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race(AnAtom, Pid2),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid3 = spawn(Fun),
+ race(AnAtom, Pid3);
+ P when is_pid(P) ->
+ true
+ end;
+ P when is_pid(P) ->
+ true
+ end.
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules1.erl
new file mode 100644
index 0000000000..00cb29cec0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules1.erl
@@ -0,0 +1,16 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules1).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_diff_modules2:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_diff_modules2:race(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules2.erl
new file mode 100644
index 0000000000..dabb7fd2da
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1/whereis_diff_modules2.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules2).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules1_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules1_pathsens.erl
new file mode 100644
index 0000000000..3dbb645e65
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules1_pathsens.erl
@@ -0,0 +1,26 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules (backward analysis).
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_modules1_pathsens).
+-export([start/3]).
+
+start(AnAtom, Fun, FunName) ->
+ Pid =
+ case FunName of
+ master ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end;
+ slave ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end
+ end,
+ whereis_diff_modules2_pathsens:race(AnAtom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules2_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules2_pathsens.erl
new file mode 100644
index 0000000000..99331b81b1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_pathsens/whereis_diff_modules2_pathsens.erl
@@ -0,0 +1,12 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules (backward analysis).
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_modules2_pathsens).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules1_rec.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules1_rec.erl
new file mode 100644
index 0000000000..a397954eea
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules1_rec.erl
@@ -0,0 +1,22 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in an indirectly recursive inter-modular function.
+
+-module(whereis_diff_modules1_rec).
+-export([start/4]).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ whereis_diff_modules2_rec:continue(NextAtom, mod:next(), Pid, Id)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules2_rec.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules2_rec.erl
new file mode 100644
index 0000000000..4b46b4a8e5
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules1_rec/whereis_diff_modules2_rec.erl
@@ -0,0 +1,8 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in a recursive function.
+
+-module(whereis_diff_modules2_rec).
+-export([continue/4]).
+
+continue(Atom, NextAtom, Fun, Id) ->
+ whereis_diff_modules1_rec:start(Atom, NextAtom, Fun, Id).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules3.erl
new file mode 100644
index 0000000000..60b5a1d378
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules3.erl
@@ -0,0 +1,8 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules3).
+-export([start/1]).
+
+start(AnAtom) ->
+ register(AnAtom, whereis_diff_modules4:race(AnAtom)).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules4.erl
new file mode 100644
index 0000000000..6ab9a4d824
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2/whereis_diff_modules4.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules4).
+-export([no_race/1, race/1]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom) ->
+ whereis(Atom).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules3_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules3_pathsens.erl
new file mode 100644
index 0000000000..1eaa954fa1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules3_pathsens.erl
@@ -0,0 +1,25 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules (forward analysis).
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_modules3_pathsens).
+-export([start/3]).
+
+start(AnAtom, Fun, FunName) ->
+ Pid =
+ case FunName of
+ master ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end;
+ slave ->
+ case whereis(AnAtom) of
+ undefined ->
+ spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules4_pathsens.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules4_pathsens.erl
new file mode 100644
index 0000000000..f23a63c8f0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_pathsens/whereis_diff_modules4_pathsens.erl
@@ -0,0 +1,13 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules (forward analysis).
+%% It takes into account control flow that might exist.
+
+-module(whereis_diff_modules4_pathsens).
+-export([no_race/1, race/4]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Fun, FunName, Pid) ->
+ whereis_diff_modules3_pathsens:start(Atom, Fun, FunName),
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules3_rec.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules3_rec.erl
new file mode 100644
index 0000000000..0320140768
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules3_rec.erl
@@ -0,0 +1,25 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in an indirectly recursive inter-modular function.
+
+-module(whereis_diff_modules3_rec).
+-export([test/0, start/4]).
+
+test() ->
+ start(undefined, second, mod:f(), self()).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ whereis_diff_modules4_rec:continue(NextAtom, mod:next(), Pid, Id)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules4_rec.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules4_rec.erl
new file mode 100644
index 0000000000..d49c59ed5c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules2_rec/whereis_diff_modules4_rec.erl
@@ -0,0 +1,8 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in a recursive function.
+
+-module(whereis_diff_modules4_rec).
+-export([continue/4]).
+
+continue(Atom, NextAtom, Fun, Id) ->
+ whereis_diff_modules3_rec:start(Atom, NextAtom, Fun, Id).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules5.erl
new file mode 100644
index 0000000000..591732aa31
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules5.erl
@@ -0,0 +1,23 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules5).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_diff_modules6:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_diff_modules6:race(AnAtom, Pid2),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid3 = spawn(Fun),
+ whereis_diff_modules6:race(AnAtom, Pid3);
+ P when is_pid(P) ->
+ true
+ end;
+ P when is_pid(P) ->
+ true
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules6.erl
new file mode 100644
index 0000000000..ec6c245c9a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules3/whereis_diff_modules6.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules6).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules1_nested.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules1_nested.erl
new file mode 100644
index 0000000000..a25d2f8784
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules1_nested.erl
@@ -0,0 +1,14 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules1_nested).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ whereis_diff_modules2_nested:race(AnAtom, Pid);
+ P when is_pid(P) ->
+ true
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules2_nested.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules2_nested.erl
new file mode 100644
index 0000000000..4b4c058884
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules2_nested.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules2_nested).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ whereis_diff_modules3_nested:race(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules3_nested.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules3_nested.erl
new file mode 100644
index 0000000000..5412660b16
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_nested/whereis_diff_modules3_nested.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules3_nested).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules1_twice.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules1_twice.erl
new file mode 100644
index 0000000000..92f2cb1fbc
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules1_twice.erl
@@ -0,0 +1,21 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having multiple calls in separate modules.
+
+-module(whereis_diff_modules1_twice).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid1 = spawn(Fun),
+ whereis_diff_modules2_twice:race(AnAtom, Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_diff_modules2_twice:race_again(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end;
+ P when is_pid(P) ->
+ true
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules2_twice.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules2_twice.erl
new file mode 100644
index 0000000000..afe5214648
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_modules_twice/whereis_diff_modules2_twice.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate modules.
+
+-module(whereis_diff_modules2_twice).
+-export([race/2, race_again/2]).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
+
+race_again(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_no_race.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_no_race.erl
new file mode 100644
index 0000000000..16f1d91490
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_no_race.erl
@@ -0,0 +1,13 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust even when the functions are called with different variables
+%% as arguments.
+
+-module(whereis_diff_vars_no_race).
+-export([test/3]).
+
+test(AnAtom, AnotherAtom, Pid) ->
+ {aux(AnAtom, Pid), aux(AnotherAtom, Pid)}.
+
+aux(Atom, Pid) ->
+ register(Atom, Pid),
+ whereis(Atom).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_race.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_race.erl
new file mode 100644
index 0000000000..7382d184dc
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_diff_vars_race.erl
@@ -0,0 +1,19 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust even when the functions are called with different variables
+%% as arguments.
+
+-module(whereis_diff_vars_race).
+-export([test/2]).
+
+test(AnAtom, AnotherAtom) ->
+ Fun = fun () -> foo end,
+ {aux(AnAtom, AnotherAtom, Fun), aux(AnotherAtom, AnAtom, Fun)}.
+
+aux(Atom1, Atom2, Fun) ->
+ case whereis(Atom1) of
+ undefined ->
+ Pid = spawn(Fun),
+ register(Atom2, Pid);
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module1.erl
new file mode 100644
index 0000000000..677551c99d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module1.erl
@@ -0,0 +1,19 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module1).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module2:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ continue(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+continue(Atom, Pid) ->
+ whereis_intra_inter_module2:race(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module2.erl
new file mode 100644
index 0000000000..cc2efbecd0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module1/whereis_intra_inter_module2.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module2).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module3.erl
new file mode 100644
index 0000000000..c8103db122
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module3.erl
@@ -0,0 +1,16 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module3).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module4:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_intra_inter_module4:race(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module4.erl
new file mode 100644
index 0000000000..9769f312a8
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module2/whereis_intra_inter_module4.erl
@@ -0,0 +1,14 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module4).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ continue(Atom, Pid).
+
+continue(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module5.erl
new file mode 100644
index 0000000000..2a29779153
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module5.erl
@@ -0,0 +1,19 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module5).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module6:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ continue(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+continue(Atom, Pid) ->
+ whereis_intra_inter_module6:race(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module6.erl
new file mode 100644
index 0000000000..92a589f97f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module3/whereis_intra_inter_module6.erl
@@ -0,0 +1,14 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module6).
+-export([no_race/1, race/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ continue(Atom, Pid).
+
+continue(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module7.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module7.erl
new file mode 100644
index 0000000000..1f702e7af3
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module7.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module7).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ register(AnAtom, continue(AnAtom, Fun)).
+
+continue(AnAtom, Fun) ->
+ whereis_intra_inter_module8:continue(AnAtom, Fun).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module8.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module8.erl
new file mode 100644
index 0000000000..581817308b
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module4/whereis_intra_inter_module8.erl
@@ -0,0 +1,13 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module8).
+-export([continue/2]).
+
+continue(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module10.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module10.erl
new file mode 100644
index 0000000000..7ed50ea742
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module10.erl
@@ -0,0 +1,16 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module10).
+-export([continue/2]).
+
+continue(AnAtom, Fun) ->
+ aux(AnAtom, Fun).
+
+aux(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module9.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module9.erl
new file mode 100644
index 0000000000..5c5d92b770
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module5/whereis_intra_inter_module9.erl
@@ -0,0 +1,11 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module9).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ register(AnAtom, continue(AnAtom, Fun)).
+
+continue(AnAtom, Fun) ->
+ whereis_intra_inter_module10:continue(AnAtom, Fun).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module11.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module11.erl
new file mode 100644
index 0000000000..82abe2f4a8
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module11.erl
@@ -0,0 +1,27 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module11).
+-export([start/2, start_again/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module12:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_intra_inter_module12:race(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+start_again(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module12:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_intra_inter_module12:continue(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module12.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module12.erl
new file mode 100644
index 0000000000..2160780d8e
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module6/whereis_intra_inter_module12.erl
@@ -0,0 +1,14 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module12).
+-export([no_race/1, race/2, continue/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ continue(Atom, Pid).
+
+continue(Atom, Pid) ->
+ register(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module13.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module13.erl
new file mode 100644
index 0000000000..3cd5cc6fa6
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module13.erl
@@ -0,0 +1,19 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module13).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module14:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ continue(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+continue(Atom, Pid) ->
+ whereis_intra_inter_module14:race(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module14.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module14.erl
new file mode 100644
index 0000000000..2de6c91985
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module7/whereis_intra_inter_module14.erl
@@ -0,0 +1,23 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module14).
+-export([no_race/1, race/2, start/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ race(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module15.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module15.erl
new file mode 100644
index 0000000000..c60d166fa9
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module15.erl
@@ -0,0 +1,19 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module15).
+-export([start/2, continue/2]).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ whereis_intra_inter_module16:no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ continue(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
+continue(Atom, Pid) ->
+ whereis_intra_inter_module16:race(Atom, Pid).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module16.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module16.erl
new file mode 100644
index 0000000000..6c170dc851
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_intra_inter_module8/whereis_intra_inter_module16.erl
@@ -0,0 +1,23 @@
+%% This tests that the race condition detection between whereis/register
+%% is robust w.r.t. having the calls in separate functions and modules.
+
+-module(whereis_intra_inter_module16).
+-export([no_race/1, race/2, start/2]).
+
+no_race(Pid) ->
+ register(master, Pid).
+
+race(Atom, Pid) ->
+ register(Atom, Pid).
+
+start(AnAtom, Fun) ->
+ Pid1 = spawn(Fun),
+ no_race(Pid1),
+ case whereis(AnAtom) of
+ undefined ->
+ Pid2 = spawn(Fun),
+ whereis_intra_inter_module15:continue(AnAtom, Pid2);
+ P when is_pid(P) ->
+ true
+ end.
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param.erl
new file mode 100644
index 0000000000..7bcde321a1
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param.erl
@@ -0,0 +1,16 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination in higher order functions.
+
+-module(whereis_param).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ register(AnAtom, continue(AnAtom, Fun)).
+
+continue(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end.
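In whereis_param the register/2 call textually wraps the lookup, but Erlang evaluates a call's arguments before the call itself, so continue/2 (and its whereis/1) runs first and the usual check-then-register window is still present. A tiny standalone sketch of that evaluation order follows; the module and names (eval_order_demo, inner/0, outer/1) are illustrative and not part of the test data.

%% Illustrative only: arguments are evaluated before the enclosing call,
%% so "inner" prints before "outer" even though outer(...) wraps inner().
-module(eval_order_demo).
-export([run/0]).

run() ->
    outer(inner()).

inner() ->
    io:format("inner: this is where whereis/1 would run~n"),
    some_value.

outer(Arg) ->
    io:format("outer: this is where register/2 would run, got ~p~n", [Arg]),
    ok.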
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module1.erl
new file mode 100644
index 0000000000..ab7c9b4cf9
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module1.erl
@@ -0,0 +1,9 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination in higher order functions and inter-module calls.
+
+-module(whereis_param_inter_module1).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ register(AnAtom, whereis_param_inter_module2:continue(AnAtom, Fun)).
+
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module2.erl
new file mode 100644
index 0000000000..61252add9a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_param_inter_module/whereis_param_inter_module2.erl
@@ -0,0 +1,13 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination in higher order functions and inter-module calls.
+
+-module(whereis_param_inter_module2).
+-export([continue/2]).
+
+continue(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun);
+ P when is_pid(P) ->
+ P
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function1.erl
new file mode 100644
index 0000000000..c8095fbf4c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function1.erl
@@ -0,0 +1,19 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination in a recursive function.
+
+-module(whereis_rec_function1).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ register(AnAtom, Pid),
+ start(AnAtom, Fun)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function2.erl
new file mode 100644
index 0000000000..2721c9e19c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function2.erl
@@ -0,0 +1,24 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in a recursive function.
+
+-module(whereis_rec_function2).
+-export([test/0]).
+
+test() ->
+ start(undefined, second, mod:f(), self()).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false -> start(NextAtom, mod:next(), Pid, Id)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function3.erl
new file mode 100644
index 0000000000..e101f34fba
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function3.erl
@@ -0,0 +1,27 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in a recursive function.
+
+-module(whereis_rec_function3).
+-export([test/0]).
+
+test() ->
+ start(undefined, second, mod:f(), self()).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ Pid =
+ case whereis(NextAtom) of
+ undefined -> spawn(Fun);
+ P1 when is_pid(P1) -> P1
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ case Pid =:= self() of
+ true -> ok;
+ false -> start(NextAtom, mod:next(), Pid, Id), io:format("", [])
+ end;
+ P2 when is_pid(P2) -> ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function4.erl
new file mode 100644
index 0000000000..4894d3397b
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function4.erl
@@ -0,0 +1,27 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in an indirectly recursive function.
+
+-module(whereis_rec_function4).
+-export([test/0]).
+
+test() ->
+ start(undefined, second, mod:f(), self()).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false -> continue(NextAtom, mod:next(), Pid, Id)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
+
+continue(Atom, NextAtom, Fun, Id) ->
+ start(Atom, NextAtom, Fun, Id).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function5.erl
new file mode 100644
index 0000000000..d821f829a2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function5.erl
@@ -0,0 +1,21 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in a recursive function.
+
+-module(whereis_rec_function5).
+-export([start/4]).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false -> start(NextAtom, mod:next(), Pid, Id)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function6.erl
new file mode 100644
index 0000000000..4ec4baf0be
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function6.erl
@@ -0,0 +1,24 @@
+%% This tests the presence of possible races due to a register/whereis
+%% combination in an indirectly recursive function.
+
+-module(whereis_rec_function6).
+-export([start/4]).
+
+start(AnAtom, NextAtom, Fun, Id) ->
+ case AnAtom of
+ undefined -> register(start, Id);
+ _ -> register(AnAtom, Id)
+ end,
+ case whereis(NextAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false -> continue(NextAtom, mod:next(), Pid, Id)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
+
+continue(Atom, NextAtom, Fun, Id) ->
+ start(Atom, NextAtom, Fun, Id).
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function7.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function7.erl
new file mode 100644
index 0000000000..7667443117
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function7.erl
@@ -0,0 +1,19 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination in a recursive function.
+
+-module(whereis_rec_function7).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ start(AnAtom, Fun),
+ register(AnAtom, Pid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function8.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function8.erl
new file mode 100644
index 0000000000..a06fb75f64
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_rec_function8.erl
@@ -0,0 +1,22 @@
+%% This tests the presence of possible races due to a whereis/register
+%% combination in a recursive function.
+
+-module(whereis_rec_function8).
+-export([test/2]).
+
+test(AnAtom, Fun) ->
+ start(AnAtom, Fun).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ start(AnAtom, Fun),
+ register(AnAtom, Pid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_try_catch.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_try_catch.erl
new file mode 100644
index 0000000000..9c8daf8d8c
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_try_catch.erl
@@ -0,0 +1,25 @@
+%% This tests that warnings do appear when a whereis/register combination
+%% is handled by try/catch.
+
+-module(whereis_try_catch).
+-export([race/1, no_race/1]).
+
+race(Pid) ->
+ case whereis(master) of
+ undefined ->
+ try
+ io:format("exception", [])
+ catch
+ _ -> register(master, Pid)
+ end
+ end.
+
+no_race(Pid) ->
+ case whereis(master) of
+ undefined ->
+ try
+ register(master, Pid)
+ catch
+ _ -> io:format("exception", [])
+ end
+ end.
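The try/catch cases also suggest how such a registration can be written without the race in the first place: skip the lookup, attempt register/2 directly (the call itself is atomic), and treat badarg as "another process won". The sketch below shows that idiom; it is illustrative only, and safe_register_demo/ensure_registered are not part of the test data.

%% Illustrative only: an atomic alternative to the whereis-then-register
%% pattern.  Catching badarg from register/2 removes the
%% time-of-check-to-time-of-use window entirely.
-module(safe_register_demo).
-export([ensure_registered/2]).

ensure_registered(Name, Fun) when is_atom(Name), is_function(Fun, 0) ->
    Pid = spawn(Fun),
    try register(Name, Pid) of
        true -> {registered, Pid}
    catch
        error:badarg ->
            exit(Pid, kill),        %% lost the race; discard our process
            {already_registered, whereis(Name)}
    end.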
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars1.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars1.erl
new file mode 100644
index 0000000000..9b249e72be
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars1.erl
@@ -0,0 +1,17 @@
+%% This tests that no warnings appear when there is no specific
+%% information about the types and the variables are not bound.
+
+-module(whereis_vars1).
+-export([start/3]).
+
+start(AnAtom, OtherAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false -> register(OtherAtom, Pid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars10.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars10.erl
new file mode 100644
index 0000000000..5c1896d6b4
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars10.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars10).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom =/= OtherAtom of
+ true -> ok;
+ false -> register(OtherAtom, Pid)
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars11.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars11.erl
new file mode 100644
index 0000000000..dc8551b3f2
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars11.erl
@@ -0,0 +1,22 @@
+%% This tests that no warnings appear when there is no specific
+%% information about the types and the variables are not bound.
+
+-module(whereis_vars11).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ OtherAtom -> ok;
+ _Other -> register(OtherAtom, Pid)
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars12.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars12.erl
new file mode 100644
index 0000000000..38b0dc5d04
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars12.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars12).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ if
+ AnAtom =:= OtherAtom -> register(OtherAtom, Pid);
+ AnAtom =/= OtherAtom -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars13.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars13.erl
new file mode 100644
index 0000000000..3a04bba02f
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars13.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars13).
+-export([start/3]).
+
+start(AnAtom, APid, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ if
+ {AnAtom, Pid} =:= {OtherAtom, APid} -> register(OtherAtom, APid);
+ {AnAtom, Pid} =/= {OtherAtom, APid} -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars14.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars14.erl
new file mode 100644
index 0000000000..c688847551
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars14.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars14).
+-export([start/3]).
+
+start(AnAtom, APid, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ if
+ [AnAtom, Pid] =:= [OtherAtom, APid] -> register(OtherAtom, APid);
+ [AnAtom, Pid] =/= [OtherAtom, APid] -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars15.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars15.erl
new file mode 100644
index 0000000000..4b3a72537e
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars15.erl
@@ -0,0 +1,23 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars15).
+-export([start/3]).
+
+start(AnAtom, OtherAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ maria -> ok;
+ kostis when AnAtom =:= OtherAtom ->
+ register(OtherAtom, Pid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars16.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars16.erl
new file mode 100644
index 0000000000..7badb8df22
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars16.erl
@@ -0,0 +1,23 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars16).
+-export([start/4]).
+
+start(AnAtom, OtherAtom, APid, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ maria -> ok;
+ kostis when {AnAtom, Pid} =:= {OtherAtom, APid} ->
+ register(OtherAtom, APid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars17.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars17.erl
new file mode 100644
index 0000000000..bc7ef5e980
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars17.erl
@@ -0,0 +1,23 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars17).
+-export([start/4]).
+
+start(AnAtom, OtherAtom, APid, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ maria -> ok;
+ kostis when [AnAtom, Pid] =:= [OtherAtom, APid] ->
+ register(OtherAtom, APid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars18.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars18.erl
new file mode 100644
index 0000000000..06416fa987
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars18.erl
@@ -0,0 +1,22 @@
+%% This tests that no warnings appear when there is no specific
+%% information about the types and the variables are not bound.
+
+-module(whereis_vars18).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom =:= OtherAtom of
+ true -> ok;
+ false -> register(OtherAtom, Pid)
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars19.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars19.erl
new file mode 100644
index 0000000000..ae5b28e42d
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars19.erl
@@ -0,0 +1,23 @@
+%% This tests that no warnings appear when there is no specific
+%% information about the types and the variables are not bound.
+
+-module(whereis_vars19).
+-export([start/3]).
+
+start(AnAtom, OtherAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ maria -> ok;
+ kostis when AnAtom =/= OtherAtom ->
+ register(OtherAtom, Pid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars2.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars2.erl
new file mode 100644
index 0000000000..bafb5d4644
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars2.erl
@@ -0,0 +1,18 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars2).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = AnAtom,
+ case Pid =:= self() of
+ true -> ok;
+ false -> register(OtherAtom, Pid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars20.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars20.erl
new file mode 100644
index 0000000000..87c6caadf0
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars20.erl
@@ -0,0 +1,22 @@
+%% This tests that no warnings appear when there is no specific
+%% information about the types and the variables are not bound.
+
+-module(whereis_vars20).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ if
+ AnAtom =:= OtherAtom -> ok;
+ AnAtom =/= OtherAtom -> register(OtherAtom, Pid)
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars21.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars21.erl
new file mode 100644
index 0000000000..73d22d3467
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars21.erl
@@ -0,0 +1,23 @@
+%% This tests that no warnings appear when there is no specific
+%% information about the types and the variables are not bound.
+
+-module(whereis_vars21).
+-export([start/3]).
+
+start(AnAtom, OtherAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ maria when AnAtom =/= OtherAtom -> ok;
+ kostis when AnAtom =/= OtherAtom ->
+ register(OtherAtom, Pid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars22.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars22.erl
new file mode 100644
index 0000000000..dd16928e33
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars22.erl
@@ -0,0 +1,27 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars22).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ Same =
+ case AnAtom of
+ OtherAtom -> true;
+ _Other -> false
+ end,
+ case Same of
+ true -> register(OtherAtom, Pid);
+ false -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars3.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars3.erl
new file mode 100644
index 0000000000..16c9a6c8bc
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars3.erl
@@ -0,0 +1,18 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars3).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ {OtherAtom, APid} = {AnAtom, Pid},
+ case Pid =:= self() of
+ true -> ok;
+ false -> register(OtherAtom, APid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars4.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars4.erl
new file mode 100644
index 0000000000..da5b329ca9
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars4.erl
@@ -0,0 +1,18 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars4).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ [OtherAtom, APid] = [AnAtom, Pid],
+ case Pid =:= self() of
+ true -> ok;
+ false -> register(OtherAtom, APid)
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars5.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars5.erl
new file mode 100644
index 0000000000..dff8646ea8
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars5.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars5).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom of
+ OtherAtom -> register(OtherAtom, Pid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars6.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars6.erl
new file mode 100644
index 0000000000..cf22ab1883
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars6.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars6).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case {AnAtom, Pid} of
+ {OtherAtom, APid} -> register(OtherAtom, APid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars7.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars7.erl
new file mode 100644
index 0000000000..4bce53982a
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars7.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars7).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case [AnAtom, Pid] of
+ [OtherAtom, APid] -> register(OtherAtom, APid);
+ _Other -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars8.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars8.erl
new file mode 100644
index 0000000000..937b83cf02
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars8.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars8).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom =:= OtherAtom of
+ true -> register(OtherAtom, Pid);
+ false -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars9.erl b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars9.erl
new file mode 100644
index 0000000000..9beb67ca38
--- /dev/null
+++ b/lib/dialyzer/test/race_tests_SUITE_data/src/whereis_vars9.erl
@@ -0,0 +1,22 @@
+%% This tests that warnings do appear when there is no specific
+%% information about the types and the variables are bound.
+
+-module(whereis_vars9).
+-export([start/2]).
+
+start(AnAtom, Fun) ->
+ case whereis(AnAtom) of
+ undefined ->
+ Pid = spawn(Fun),
+ OtherAtom = kostis,
+ case Pid =:= self() of
+ true -> ok;
+ false ->
+ case AnAtom == OtherAtom of
+ true -> register(OtherAtom, Pid);
+ false -> ok
+ end
+ end;
+ P when is_pid(P) ->
+ ok
+ end.
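All of the whereis_vars cases above exercise the same check-then-register race: the process inspects a name with whereis/1 and later calls register/2 on that name (or an alias of it bound via matching, tuples, lists, or guards), so another process can claim the name between the two calls. A minimal sketch of the pattern the race analysis is expected to flag (module and function names here are illustrative, not part of the test data):

    -module(whereis_race_sketch).
    -export([start/2]).

    %% whereis/1 followed by register/2 on the same name: the name may be
    %% registered by another process between the two calls.
    start(AnAtom, Fun) ->
        case whereis(AnAtom) of
            undefined ->
                Pid = spawn(Fun),
                register(AnAtom, Pid);
            P when is_pid(P) ->
                ok
        end.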
diff --git a/lib/dialyzer/test/remake b/lib/dialyzer/test/remake
new file mode 100755
index 0000000000..654bdd9e88
--- /dev/null
+++ b/lib/dialyzer/test/remake
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+erlc +warn_exported_vars +warn_unused_import +warn_untyped_record +warn_missing_spec dialyzer_common.erl file_utils.erl
+if [ -n "$1" ]; then
+ erl -noshell -run dialyzer_common create_suite "$1" -s erlang halt
+else
+ erl -noshell -run dialyzer_common create_all_suites -s erlang halt
+fi
+rm dialyzer_common.beam file_utils.beam
\ No newline at end of file
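The remake script above ties the generator together: it compiles dialyzer_common and file_utils with a strict warning set and then runs dialyzer_common:create_suite/1 for one suite or create_all_suites/0 for all of them, regenerating each *_SUITE.erl from its *_SUITE_data directory. A usage sketch, assuming it is run from lib/dialyzer/test (the exact form of the suite-name argument is an assumption; the script and dialyzer_common define the authoritative behaviour):

    ./remake              # regenerate every generated suite
    ./remake small_tests  # regenerate a single suite (argument form assumed)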
diff --git a/lib/dialyzer/test/small_tests_SUITE.erl b/lib/dialyzer/test/small_tests_SUITE.erl
new file mode 100644
index 0000000000..21a2c76160
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE.erl
@@ -0,0 +1,483 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use the './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(small_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([small_tests_SUITE_consistency/1, app_call/1, appmon_place/1,
+ areq/1, atom_call/1, atom_guard/1, atom_widen/1,
+ bs_fail_constr/1, bs_utf8/1, cerl_hipeify/1, comm_layer/1,
+ compare1/1, confusing_warning/1, contract2/1, contract3/1,
+ contract5/1, disj_norm_form/1, eqeq/1, ets_select/1,
+ exhaust_case/1, failing_guard1/1, flatten/1, fun_app/1,
+ fun_ref_match/1, fun_ref_record/1, gencall/1, gs_make/1,
+ inf_loop2/1, letrec1/1, list_match/1, lzip/1, make_tuple/1,
+ minus_minus/1, mod_info/1, my_filter/1, my_sofs/1, no_match/1,
+ no_unused_fun/1, no_unused_fun2/1, non_existing/1,
+ not_guard_crash/1, or_bug/1, orelsebug/1, orelsebug2/1,
+ overloaded1/1, port_info_test/1, process_info_test/1, pubsub/1,
+ receive1/1, record_construct/1, record_pat/1,
+ record_send_test/1, record_test/1, recursive_types1/1,
+ recursive_types2/1, recursive_types3/1, recursive_types4/1,
+ recursive_types5/1, recursive_types6/1, recursive_types7/1,
+ refine_bug1/1, toth/1, trec/1, try1/1, tuple1/1,
+ unsafe_beamcode_bug/1, unused_cases/1, unused_clauses/1,
+ zero_tuple/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, []}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [small_tests_SUITE_consistency,app_call,appmon_place,areq,atom_call,
+ atom_guard,atom_widen,bs_fail_constr,bs_utf8,cerl_hipeify,comm_layer,
+ compare1,confusing_warning,contract2,contract3,contract5,disj_norm_form,
+ eqeq,ets_select,exhaust_case,failing_guard1,flatten,fun_app,fun_ref_match,
+ fun_ref_record,gencall,gs_make,inf_loop2,letrec1,list_match,lzip,
+ make_tuple,minus_minus,mod_info,my_filter,my_sofs,no_match,no_unused_fun,
+ no_unused_fun2,non_existing,not_guard_crash,or_bug,orelsebug,orelsebug2,
+ overloaded1,port_info_test,process_info_test,pubsub,receive1,
+ record_construct,record_pat,record_send_test,record_test,recursive_types1,
+ recursive_types2,recursive_types3,recursive_types4,recursive_types5,
+ recursive_types6,recursive_types7,refine_bug1,toth,trec,try1,tuple1,
+ unsafe_beamcode_bug,unused_cases,unused_clauses,zero_tuple].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
+
+small_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+app_call(Config) ->
+ case dialyze(Config, app_call) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+appmon_place(Config) ->
+ case dialyze(Config, appmon_place) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+areq(Config) ->
+ case dialyze(Config, areq) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+atom_call(Config) ->
+ case dialyze(Config, atom_call) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+atom_guard(Config) ->
+ case dialyze(Config, atom_guard) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+atom_widen(Config) ->
+ case dialyze(Config, atom_widen) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+bs_fail_constr(Config) ->
+ case dialyze(Config, bs_fail_constr) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+bs_utf8(Config) ->
+ case dialyze(Config, bs_utf8) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+cerl_hipeify(Config) ->
+ case dialyze(Config, cerl_hipeify) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+comm_layer(Config) ->
+ case dialyze(Config, comm_layer) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+compare1(Config) ->
+ case dialyze(Config, compare1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+confusing_warning(Config) ->
+ case dialyze(Config, confusing_warning) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+contract2(Config) ->
+ case dialyze(Config, contract2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+contract3(Config) ->
+ case dialyze(Config, contract3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+contract5(Config) ->
+ case dialyze(Config, contract5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+disj_norm_form(Config) ->
+ case dialyze(Config, disj_norm_form) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+eqeq(Config) ->
+ case dialyze(Config, eqeq) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+ets_select(Config) ->
+ case dialyze(Config, ets_select) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+exhaust_case(Config) ->
+ case dialyze(Config, exhaust_case) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+failing_guard1(Config) ->
+ case dialyze(Config, failing_guard1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+flatten(Config) ->
+ case dialyze(Config, flatten) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+fun_app(Config) ->
+ case dialyze(Config, fun_app) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+fun_ref_match(Config) ->
+ case dialyze(Config, fun_ref_match) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+fun_ref_record(Config) ->
+ case dialyze(Config, fun_ref_record) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+gencall(Config) ->
+ case dialyze(Config, gencall) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+gs_make(Config) ->
+ case dialyze(Config, gs_make) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+inf_loop2(Config) ->
+ case dialyze(Config, inf_loop2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+letrec1(Config) ->
+ case dialyze(Config, letrec1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+list_match(Config) ->
+ case dialyze(Config, list_match) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+lzip(Config) ->
+ case dialyze(Config, lzip) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+make_tuple(Config) ->
+ case dialyze(Config, make_tuple) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+minus_minus(Config) ->
+ case dialyze(Config, minus_minus) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+mod_info(Config) ->
+ case dialyze(Config, mod_info) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+my_filter(Config) ->
+ case dialyze(Config, my_filter) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+my_sofs(Config) ->
+ case dialyze(Config, my_sofs) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+no_match(Config) ->
+ case dialyze(Config, no_match) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+no_unused_fun(Config) ->
+ case dialyze(Config, no_unused_fun) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+no_unused_fun2(Config) ->
+ case dialyze(Config, no_unused_fun2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+non_existing(Config) ->
+ case dialyze(Config, non_existing) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+not_guard_crash(Config) ->
+ case dialyze(Config, not_guard_crash) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+or_bug(Config) ->
+ case dialyze(Config, or_bug) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+orelsebug(Config) ->
+ case dialyze(Config, orelsebug) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+orelsebug2(Config) ->
+ case dialyze(Config, orelsebug2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+overloaded1(Config) ->
+ case dialyze(Config, overloaded1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+port_info_test(Config) ->
+ case dialyze(Config, port_info_test) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+process_info_test(Config) ->
+ case dialyze(Config, process_info_test) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+pubsub(Config) ->
+ case dialyze(Config, pubsub) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+receive1(Config) ->
+ case dialyze(Config, receive1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+record_construct(Config) ->
+ case dialyze(Config, record_construct) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+record_pat(Config) ->
+ case dialyze(Config, record_pat) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+record_send_test(Config) ->
+ case dialyze(Config, record_send_test) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+record_test(Config) ->
+ case dialyze(Config, record_test) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types1(Config) ->
+ case dialyze(Config, recursive_types1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types2(Config) ->
+ case dialyze(Config, recursive_types2) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types3(Config) ->
+ case dialyze(Config, recursive_types3) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types4(Config) ->
+ case dialyze(Config, recursive_types4) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types5(Config) ->
+ case dialyze(Config, recursive_types5) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types6(Config) ->
+ case dialyze(Config, recursive_types6) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+recursive_types7(Config) ->
+ case dialyze(Config, recursive_types7) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+refine_bug1(Config) ->
+ case dialyze(Config, refine_bug1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+toth(Config) ->
+ case dialyze(Config, toth) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+trec(Config) ->
+ case dialyze(Config, trec) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+try1(Config) ->
+ case dialyze(Config, try1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+tuple1(Config) ->
+ case dialyze(Config, tuple1) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+unsafe_beamcode_bug(Config) ->
+ case dialyze(Config, unsafe_beamcode_bug) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+unused_cases(Config) ->
+ case dialyze(Config, unused_cases) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+unused_clauses(Config) ->
+ case dialyze(Config, unused_clauses) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+zero_tuple(Config) ->
+ case dialyze(Config, zero_tuple) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
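Every generated case in the suite above is a thin wrapper: it calls the local dialyze/2 helper, which pulls the baked-in options plus the data and priv directories out of Config and delegates to dialyzer_common:check/4. A return of 'same' means the emitted warnings match the stored results file; anything else fails the case. The small_tests_SUITE_consistency case additionally fails with {missing_tests, New} when dialyzer_common:new_tests/2 finds test data that the generated all/0 list does not cover, which is the cue to rerun ./remake. A hand-written case would have to follow the same skeleton (the name my_test below is hypothetical):

    my_test(Config) ->
        case dialyze(Config, my_test) of
            'same' -> 'same';
            Error -> ct:fail(Error)
        end.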
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/small_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..50991c9bc5
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/dialyzer_options
@@ -0,0 +1 @@
+{dialyzer_options, []}.
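Each *_SUITE_data directory carries a dialyzer_options file like the one above: a single Erlang term with the analysis options for that suite. When ./remake regenerates the suite, these options are baked into init_per_suite/1 and handed to dialyzer_common:check/4 by the dialyze/2 helper. For small_tests the list is empty; a suite that needs extra warnings would use a non-empty list, for example (illustrative only, not taken from this commit):

    {dialyzer_options, [{warnings, [race_conditions]}]}.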
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/andalso_test b/lib/dialyzer/test/small_tests_SUITE_data/results/andalso_test
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/andalso_test
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/app_call b/lib/dialyzer/test/small_tests_SUITE_data/results/app_call
new file mode 100644
index 0000000000..cc1a63f944
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/app_call
@@ -0,0 +1,3 @@
+
+app_call.erl:6: The call M:'foo'() requires that M is of type atom() | tuple() not 42
+app_call.erl:9: The call 'mod':F() requires that F is of type atom() not {'gazonk',[]}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/appmon_place b/lib/dialyzer/test/small_tests_SUITE_data/results/appmon_place
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/appmon_place
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/areq b/lib/dialyzer/test/small_tests_SUITE_data/results/areq
new file mode 100644
index 0000000000..dd91f2d2bf
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/areq
@@ -0,0 +1,2 @@
+
+areq.erl:11: The test float() =:= 3 can never evaluate to 'true'
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/atom_call b/lib/dialyzer/test/small_tests_SUITE_data/results/atom_call
new file mode 100644
index 0000000000..851bb7ab12
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/atom_call
@@ -0,0 +1,3 @@
+
+atom_call.erl:14: Fun application will fail since F :: 'f' is not a function of arity 0
+atom_call.erl:14: Function g/0 has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/atom_widen b/lib/dialyzer/test/small_tests_SUITE_data/results/atom_widen
new file mode 100644
index 0000000000..6d0a7b2737
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/atom_widen
@@ -0,0 +1,3 @@
+
+atom_widen.erl:10: The call atom_widen:foo('z') will never return since it differs in the 1st argument from the success typing arguments: ('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'k' | 'l' | 'm' | 'n')
+atom_widen.erl:9: Function test/0 has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/bs_fail_constr b/lib/dialyzer/test/small_tests_SUITE_data/results/bs_fail_constr
new file mode 100644
index 0000000000..dbc8241971
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/bs_fail_constr
@@ -0,0 +1,9 @@
+
+bs_fail_constr.erl:11: Function w3/1 has no local return
+bs_fail_constr.erl:12: Binary construction will fail since the size field S in segment 42:S/integer-unit:1 has type neg_integer()
+bs_fail_constr.erl:14: Function w4/1 has no local return
+bs_fail_constr.erl:15: Binary construction will fail since the value field V in segment V/utf32 has type float()
+bs_fail_constr.erl:5: Function w1/1 has no local return
+bs_fail_constr.erl:6: Binary construction will fail since the value field V in segment V:8/integer-unit:1 has type float()
+bs_fail_constr.erl:8: Function w2/1 has no local return
+bs_fail_constr.erl:9: Binary construction will fail since the value field V in segment V/binary-unit:8 has type atom()
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/bs_utf8 b/lib/dialyzer/test/small_tests_SUITE_data/results/bs_utf8
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/bs_utf8
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/cerl_hipeify b/lib/dialyzer/test/small_tests_SUITE_data/results/cerl_hipeify
new file mode 100644
index 0000000000..87bf6f309f
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/cerl_hipeify
@@ -0,0 +1,4 @@
+
+cerl_hipeify.erl:370: Function will never be called
+cerl_hipeify.erl:370: Guard test fun((none()) -> none()) =:= F::{_,_,_} | {_,_,_,_} | {_,_,_,_,_} | {_,_,_,_,_,_} | {_,_,_,_,_,_,_} can never succeed
+cerl_hipeify.erl:641: Function env__new_function_name/2 will never be called
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/comm_layer b/lib/dialyzer/test/small_tests_SUITE_data/results/comm_layer
new file mode 100644
index 0000000000..cb4bf14eb4
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/comm_layer
@@ -0,0 +1,2 @@
+
+comm_layer.erl:76: Invalid type specification for function 'comm_layer_dir.comm_layer':this/0. The success typing is () -> {_,integer(),pid()}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/compare1 b/lib/dialyzer/test/small_tests_SUITE_data/results/compare1
new file mode 100644
index 0000000000..f0d696ffcb
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/compare1
@@ -0,0 +1,4 @@
+
+compare1.erl:15: Guard test X::42 > 42 can never succeed
+compare1.erl:17: Guard test X::42 < 42 can never succeed
+compare1.erl:19: Guard test X::42 =/= 42 can never succeed
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/confusing_warning b/lib/dialyzer/test/small_tests_SUITE_data/results/confusing_warning
new file mode 100644
index 0000000000..d2d0c91fff
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/confusing_warning
@@ -0,0 +1,2 @@
+
+confusing_warning.erl:16: The pattern {'a', {_, L}} can never match the type {'b','aaa' | 'bbb'}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/contract1 b/lib/dialyzer/test/small_tests_SUITE_data/results/contract1
new file mode 100644
index 0000000000..fb8ba5f72b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/contract1
@@ -0,0 +1,3 @@
+
+contract1.erl:23: Function test/0 has no local return
+contract1.erl:24: The pattern 42 can never match the type 'a' | 'b' | 'c'
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/contract2 b/lib/dialyzer/test/small_tests_SUITE_data/results/contract2
new file mode 100644
index 0000000000..6809e528c4
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/contract2
@@ -0,0 +1,2 @@
+
+contract2.erl:13: The call contract2:test(T::any(),nonempty_maybe_improper_list()) will never return since it differs in the 2nd argument from the success typing arguments: (['true'],[])
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/contract3 b/lib/dialyzer/test/small_tests_SUITE_data/results/contract3
new file mode 100644
index 0000000000..44b49e745a
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/contract3
@@ -0,0 +1,3 @@
+
+contract3.erl:17: Overloaded contract has overlapping domains; such contracts are currently unsupported and are simply ignored
+contract3.erl:29: Overloaded contract has overlapping domains; such contracts are currently unsupported and are simply ignored
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/contract5 b/lib/dialyzer/test/small_tests_SUITE_data/results/contract5
new file mode 100644
index 0000000000..116c4f4d4d
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/contract5
@@ -0,0 +1,2 @@
+
+contract5.erl:13: Invalid type specification for function contract5:t/0. The success typing is () -> #bar{baz::'not_a_boolean'}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/eqeq b/lib/dialyzer/test/small_tests_SUITE_data/results/eqeq
new file mode 100644
index 0000000000..dabd38ebe3
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/eqeq
@@ -0,0 +1,2 @@
+
+eqeq.erl:15: The test float() =:= 'foo' can never evaluate to 'true'
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/ets_select b/lib/dialyzer/test/small_tests_SUITE_data/results/ets_select
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/ets_select
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/exhaust_case b/lib/dialyzer/test/small_tests_SUITE_data/results/exhaust_case
new file mode 100644
index 0000000000..45cdd80b64
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/exhaust_case
@@ -0,0 +1,3 @@
+
+exhaust_case.erl:17: The pattern 42 can never match the type 'bar' | 'foo'
+exhaust_case.erl:18: The variable _other can never match since previous clauses completely covered the type 'bar' | 'foo'
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/failing_guard1 b/lib/dialyzer/test/small_tests_SUITE_data/results/failing_guard1
new file mode 100644
index 0000000000..5bdd13093a
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/failing_guard1
@@ -0,0 +1,4 @@
+
+failing_guard1.erl:12: Guard test float() =:= 2 can never succeed
+failing_guard1.erl:13: Guard test integer() =:= float() can never succeed
+failing_guard1.erl:14: Guard test -2 | -1 | 0 | 1 | 2 =:= float() can never succeed
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/flatten b/lib/dialyzer/test/small_tests_SUITE_data/results/flatten
new file mode 100644
index 0000000000..4571214e49
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/flatten
@@ -0,0 +1,2 @@
+
+flatten.erl:17: The call lists:flatten(nonempty_improper_list(atom() | binary() | [any()] | char(),atom())) will never return since it differs in the 1st argument from the success typing arguments: ([any()])
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/fun_app b/lib/dialyzer/test/small_tests_SUITE_data/results/fun_app
new file mode 100644
index 0000000000..b28baad43b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/fun_app
@@ -0,0 +1,7 @@
+
+fun_app.erl:37: Fun application will fail since F :: fun((_,_,_) -> 'ok' | 'true') is not a function of arity 1
+fun_app.erl:37: The created fun has no local return
+fun_app.erl:38: Fun application will fail since F :: fun((_,_,_) -> 'ok' | 'true') is not a function of arity 2
+fun_app.erl:38: The created fun has no local return
+fun_app.erl:40: Fun application will fail since F :: fun((_,_,_) -> 'ok' | 'true') is not a function of arity 4
+fun_app.erl:40: The created fun has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/fun_ref_match b/lib/dialyzer/test/small_tests_SUITE_data/results/fun_ref_match
new file mode 100644
index 0000000000..60b34530b4
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/fun_ref_match
@@ -0,0 +1,2 @@
+
+fun_ref_match.erl:14: Function will never be called
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/gencall b/lib/dialyzer/test/small_tests_SUITE_data/results/gencall
new file mode 100644
index 0000000000..d0479ed738
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/gencall
@@ -0,0 +1,4 @@
+
+gencall.erl:11: Call to missing or unexported function gencall:foo/0
+gencall.erl:12: Call to missing or unexported function gen_server:handle_cast/2
+gencall.erl:9: Call to missing or unexported function ets:lookup/3
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/gs_make b/lib/dialyzer/test/small_tests_SUITE_data/results/gs_make
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/gs_make
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/inf_loop2 b/lib/dialyzer/test/small_tests_SUITE_data/results/inf_loop2
new file mode 100644
index 0000000000..7e9972ad98
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/inf_loop2
@@ -0,0 +1,4 @@
+
+inf_loop2.erl:18: Function test/0 has no local return
+inf_loop2.erl:19: The call lists:reverse('gazonk') will never return since it differs in the 1st argument from the success typing arguments: ([any()])
+inf_loop2.erl:22: Function loop/0 has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/letrec1 b/lib/dialyzer/test/small_tests_SUITE_data/results/letrec1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/letrec1
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/list_match b/lib/dialyzer/test/small_tests_SUITE_data/results/list_match
new file mode 100644
index 0000000000..95007da604
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/list_match
@@ -0,0 +1,2 @@
+
+list_match.erl:19: The pattern [_ | T] can never match since previous clauses completely covered the type [1 | 2 | 3 | 4]
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/lzip b/lib/dialyzer/test/small_tests_SUITE_data/results/lzip
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/lzip
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/make_tuple b/lib/dialyzer/test/small_tests_SUITE_data/results/make_tuple
new file mode 100644
index 0000000000..4d51586e35
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/make_tuple
@@ -0,0 +1,3 @@
+
+make_tuple.erl:4: Function test/0 has no local return
+make_tuple.erl:5: The pattern {_, _} can never match the type {_,_,_}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/minus_minus b/lib/dialyzer/test/small_tests_SUITE_data/results/minus_minus
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/minus_minus
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/mod_info b/lib/dialyzer/test/small_tests_SUITE_data/results/mod_info
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/mod_info
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/my_filter b/lib/dialyzer/test/small_tests_SUITE_data/results/my_filter
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/my_filter
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/my_sofs b/lib/dialyzer/test/small_tests_SUITE_data/results/my_sofs
new file mode 100644
index 0000000000..bfee0bce0d
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/my_sofs
@@ -0,0 +1,3 @@
+
+my_sofs.erl:34: The pattern {'Set', _, _} can never match the type #OrdSet{}
+my_sofs.erl:54: The pattern {'Set', _, _} can never match the type #OrdSet{}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/no_match b/lib/dialyzer/test/small_tests_SUITE_data/results/no_match
new file mode 100644
index 0000000000..9760b980a2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/no_match
@@ -0,0 +1,4 @@
+
+no_match.erl:5: Function t1/1 has no clauses that will ever match
+no_match.erl:7: Function t2/1 has no clauses that will ever match
+no_match.erl:9: Function t3/1 has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun b/lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun2 b/lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun2
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/no_unused_fun2
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/non_existing b/lib/dialyzer/test/small_tests_SUITE_data/results/non_existing
new file mode 100644
index 0000000000..58da2bfc8b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/non_existing
@@ -0,0 +1,2 @@
+
+non_existing.erl:9: Call to missing or unexported function lists:non_existing_call/1
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/not_guard_crash b/lib/dialyzer/test/small_tests_SUITE_data/results/not_guard_crash
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/not_guard_crash
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/or_bug b/lib/dialyzer/test/small_tests_SUITE_data/results/or_bug
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/or_bug
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug b/lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug2 b/lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug2
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/orelsebug2
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/overloaded1 b/lib/dialyzer/test/small_tests_SUITE_data/results/overloaded1
new file mode 100644
index 0000000000..ab57ec03ff
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/overloaded1
@@ -0,0 +1,3 @@
+
+overloaded1.erl:10: The pattern {'ok', 'gazonk'} can never match the type {'error',_} | {'ok',{atom(),atom(),byte()}}
+overloaded1.erl:9: Function test1/0 has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/port_info_test b/lib/dialyzer/test/small_tests_SUITE_data/results/port_info_test
new file mode 100644
index 0000000000..9ee863f9eb
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/port_info_test
@@ -0,0 +1,6 @@
+
+port_info_test.erl:10: The pattern {'connected', 42} can never match the type 'undefined' | {'connected',pid()}
+port_info_test.erl:14: The pattern {'registered_name', "42"} can never match the type 'undefined' | {'registered_name',atom()}
+port_info_test.erl:19: The pattern {'output', 42} can never match the type 'undefined' | {'connected',pid()}
+port_info_test.erl:24: Guard test 'links' =:= Atom::'connected' can never succeed
+port_info_test.erl:28: The pattern {'gazonk', _} can never match the type 'undefined' | {'connected' | 'id' | 'input' | 'links' | 'name' | 'output' | 'registered_name',atom() | pid() | [pid() | char()] | integer()}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/process_info_test b/lib/dialyzer/test/small_tests_SUITE_data/results/process_info_test
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/process_info_test
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/pubsub b/lib/dialyzer/test/small_tests_SUITE_data/results/pubsub
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/pubsub
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/receive1 b/lib/dialyzer/test/small_tests_SUITE_data/results/receive1
new file mode 100644
index 0000000000..abf6eec0ca
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/receive1
@@ -0,0 +1,2 @@
+
+receive1.erl:12: Function t/1 has no local return
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/record_construct b/lib/dialyzer/test/small_tests_SUITE_data/results/record_construct
new file mode 100644
index 0000000000..c0110b144f
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/record_construct
@@ -0,0 +1,7 @@
+
+record_construct.erl:15: Function t_opa/0 has no local return
+record_construct.erl:16: Record construction #r_opa{b::gb_set(),c::42,e::'false'} violates the declared type of field c::boolean()
+record_construct.erl:20: Function t_rem/0 has no local return
+record_construct.erl:21: Record construction #r_rem{a::'gazonk'} violates the declared type of field a::string()
+record_construct.erl:6: Function t_loc/0 has no local return
+record_construct.erl:7: Record construction #r_loc{a::'gazonk',b::42} violates the declared type of field a::integer() and b::atom()
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/record_pat b/lib/dialyzer/test/small_tests_SUITE_data/results/record_pat
new file mode 100644
index 0000000000..9a3f925e42
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/record_pat
@@ -0,0 +1,2 @@
+
+record_pat.erl:14: The pattern {'foo', 'baz'} violates the declared type for #foo{}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/record_send_test b/lib/dialyzer/test/small_tests_SUITE_data/results/record_send_test
new file mode 100644
index 0000000000..6a08d44179
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/record_send_test
@@ -0,0 +1,2 @@
+
+record_send_test.erl:30: The call erlang:'!'(Rec1::#rec1{a::'a',b::'b',c::'c'},'hello_again') will never return since it differs in the 1st argument from the success typing arguments: (atom() | pid() | port() | {atom(),atom()},any())
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/record_test b/lib/dialyzer/test/small_tests_SUITE_data/results/record_test
new file mode 100644
index 0000000000..9715f0dcfb
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/record_test
@@ -0,0 +1,3 @@
+
+record_test.erl:19: The pattern {'foo', _} can never match the type 'foo'
+record_test.erl:21: The variable _ can never match since previous clauses completely covered the type 'foo'
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types1 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types1
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types2 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types2
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types2
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types3 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types3
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types3
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types4 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types4
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types4
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types5 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types5
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types5
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types6 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types6
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types6
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types7 b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types7
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/recursive_types7
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/toth b/lib/dialyzer/test/small_tests_SUITE_data/results/toth
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/toth
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/trec b/lib/dialyzer/test/small_tests_SUITE_data/results/trec
new file mode 100644
index 0000000000..01ccc63761
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/trec
@@ -0,0 +1,7 @@
+
+trec.erl:26: Function test/0 has no local return
+trec.erl:27: The call trec:mk_foo_loc(42,any()) will never return since it differs in the 1st argument from the success typing arguments: ('undefined',atom())
+trec.erl:29: Function mk_foo_loc/2 has no local return
+trec.erl:30: Record construction violates the declared type for #foo{} since variable A cannot be of type atom()
+trec.erl:36: Function mk_foo_exp/2 has no local return
+trec.erl:37: Record construction violates the declared type for #foo{} since variable A cannot be of type atom()
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/try1 b/lib/dialyzer/test/small_tests_SUITE_data/results/try1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/try1
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/tuple1 b/lib/dialyzer/test/small_tests_SUITE_data/results/tuple1
new file mode 100644
index 0000000000..1b5ed49b56
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/tuple1
@@ -0,0 +1,5 @@
+
+tuple1.erl:13: Function t1/2 has no local return
+tuple1.erl:14: The call lists:mapfoldl(fun((_,_) -> 'a' | 'b'),X::any(),List::nonempty_maybe_improper_list()) will never return since the success typing arguments are (fun((_,_) -> {_,_}),any(),[any()])
+tuple1.erl:19: Function t3/2 has no local return
+tuple1.erl:20: The call lists:mapfoldl(fun((_) -> 1),X::any(),List::nonempty_maybe_improper_list()) will never return since it differs in the 1st argument from the success typing arguments: (fun((_,_) -> {_,_}),any(),[any()])
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/unsafe_beamcode_bug b/lib/dialyzer/test/small_tests_SUITE_data/results/unsafe_beamcode_bug
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/unsafe_beamcode_bug
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/unused_cases b/lib/dialyzer/test/small_tests_SUITE_data/results/unused_cases
new file mode 100644
index 0000000000..cafe1c042b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/unused_cases
@@ -0,0 +1,4 @@
+
+unused_cases.erl:21: The variable OTHER can never match since previous clauses completely covered the type {42,42}
+unused_cases.erl:27: The pattern 'weird' can never match the type 'false'
+unused_cases.erl:35: The variable OTHER can never match since previous clauses completely covered the type boolean()
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/unused_clauses b/lib/dialyzer/test/small_tests_SUITE_data/results/unused_clauses
new file mode 100644
index 0000000000..4603e888c1
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/unused_clauses
@@ -0,0 +1,3 @@
+
+unused_clauses.erl:16: Guard test is_integer(X::{42}) can never succeed
+unused_clauses.erl:18: The variable X can never match since previous clauses completely covered the type 'atom' | {42}
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/results/zero_tuple b/lib/dialyzer/test/small_tests_SUITE_data/results/zero_tuple
new file mode 100644
index 0000000000..bf5ec5cd6e
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/results/zero_tuple
@@ -0,0 +1,5 @@
+
+zero_tuple.erl:4: Function t1/0 has no local return
+zero_tuple.erl:5: The pattern {} can never match the type 'a'
+zero_tuple.erl:8: Function t2/0 has no local return
+zero_tuple.erl:9: The pattern 'b' can never match the type 'a'
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/app_call.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/app_call.erl
new file mode 100644
index 0000000000..54d178d29a
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/app_call.erl
@@ -0,0 +1,17 @@
+-module(app_call).
+-export([test/1]).
+
+test(m) ->
+ M = get_mod(),
+ M:foo();
+test(f) ->
+ F = get_fun(),
+ mod:F();
+test(_) ->
+ ok.
+
+get_mod() ->
+ 42.
+
+get_fun() ->
+ {gazonk, []}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/appmon_place.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/appmon_place.erl
new file mode 100644
index 0000000000..8371cab233
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/appmon_place.erl
@@ -0,0 +1,71 @@
+%%---------------------------------------------------------------------
+%% This is added as a test because it was giving a false positive
+%% (function move/4 will never be called) due to the strange use of
+%% self-recursive fun construction in placex/3.
+%%
+%% The analysis was getting confused that the foldl call will never
+%% terminate (due to a wrong hard-coded type for foldl) and inferred
+%% that the remaining calls in the body of placex/3 will not be
+%% reached. Fixed 11 March 2005.
+%%---------------------------------------------------------------------
+
+-module(appmon_place).
+-export([place/2]).
+
+place(DG, Root) ->
+ case appmon_dg:get(data, DG, Root) of
+ false -> [0];
+ _Other ->
+ placey(DG, Root, 1),
+ placex(DG, Root, [])
+ end.
+
+placey(DG, V, Y) ->
+ appmon_dg:set(y, DG, V, Y),
+ Y1 = Y+1,
+ lists:foreach(fun(C) -> placey(DG, C, Y1) end, appmon_dg:get(out, DG, V)).
+
+placex(DG, V, LastX) ->
+ Ch = appmon_dg:get(out, DG, V),
+ ChLX = lists:foldl(fun(C, Accu) -> placex(DG, C, Accu) end,
+ tll(LastX),
+ Ch),
+ Width = appmon_dg:get(w, DG, V),
+ MyX = calc_mid(DG, Width, Ch),
+ DeltaX = calc_delta(MyX, hdd(LastX)+20),
+ appmon_dg:set(x, DG, V, MyX),
+ move(DG, V, [MyX+Width | ChLX], DeltaX).
+
+move(_DG, _L, LastX, 0) -> LastX;
+move(DG, V, LastX, DeltaX) -> move2(DG, V, LastX, DeltaX).
+
+move2(DG, V, LastX, DeltaX) ->
+ NewX = appmon_dg:get(x, DG, V)+DeltaX,
+ appmon_dg:set(x, DG, V, NewX),
+ ChLX = lists:foldl(fun(C, LX) -> move2(DG, C, LX, DeltaX) end,
+ tll(LastX),
+ appmon_dg:get(out, DG, V)),
+ [max(NewX+appmon_dg:get(w, DG, V), hdd(LastX)) | ChLX].
+
+max(A, B) when A>B -> A;
+max(_, B) -> B.
+
+calc_mid(_DG, _Width, []) -> 0;
+calc_mid(DG, Width, ChList) ->
+ LeftMostX = appmon_dg:get(x, DG, hd(ChList)),
+ Z2 = lists:last(ChList),
+ RightMostX = appmon_dg:get(x, DG, Z2)+appmon_dg:get(w, DG, Z2),
+ trunc((LeftMostX+RightMostX)/2)-trunc(Width/2).
+
+calc_delta(Mid, Right) ->
+ if Right>Mid -> Right-Mid;
+ true -> 0
+ end.
+
+%% Special head and tail
+%% Handles empty list in a non-standard way
+tll([]) -> [];
+tll([_|T]) -> T.
+hdd([]) -> 0;
+hdd([H|_]) -> H.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/areq.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/areq.erl
new file mode 100644
index 0000000000..1b4eea8511
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/areq.erl
@@ -0,0 +1,12 @@
+-module(areq).
+
+-export([t/0]).
+
+t() ->
+ ar_comp(3.0, 3),
+ ex_comp(3.0, 3).
+
+ar_comp(X, Y) -> X == Y.
+
+ex_comp(X, Y) -> X =:= Y.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/atom_call.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/atom_call.erl
new file mode 100644
index 0000000000..bf0646eadc
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/atom_call.erl
@@ -0,0 +1,14 @@
+%%%-------------------------------------------------------------------
+%%% File : atom_call.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 10 Dec 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(atom_call).
+
+-export([f/0,g/0]).
+
+f() -> ok.
+
+g() -> F = f, F().
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/atom_guard.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/atom_guard.erl
new file mode 100644
index 0000000000..67d97f8e29
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/atom_guard.erl
@@ -0,0 +1,9 @@
+-module(atom_guard).
+-export([test/0]).
+
+test() ->
+ foo(42).
+
+foo(X) when is_atom(x) ->
+ X.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/atom_widen.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/atom_widen.erl
new file mode 100644
index 0000000000..81bfac9d56
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/atom_widen.erl
@@ -0,0 +1,24 @@
+%%---------------------------------------------------------------------
+%% Tests that the set widening limit is at least as big as 13,
+%% which allows for the following discrepancy to be detected.
+%%---------------------------------------------------------------------
+
+-module(atom_widen).
+-export([test/0, foo/1]).
+
+test() ->
+ foo(z).
+
+foo(a) -> 1;
+foo(b) -> 2;
+foo(c) -> 3;
+foo(d) -> 4;
+foo(e) -> 5;
+foo(f) -> 6;
+foo(g) -> 7;
+foo(h) -> 8;
+foo(i) -> 9;
+foo(k) -> 10;
+foo(l) -> 11;
+foo(m) -> 12;
+foo(n) -> 13.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/bs_fail_constr.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/bs_fail_constr.erl
new file mode 100644
index 0000000000..20fd1cbf64
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/bs_fail_constr.erl
@@ -0,0 +1,16 @@
+-module(bs_fail_constr).
+
+-export([w1/1, w2/1, w3/1, w4/1]).
+
+w1(V) when is_float(V) ->
+ <<V/integer>>.
+
+w2(V) when is_atom(V) ->
+ <<V/binary>>.
+
+w3(S) when is_integer(S), S < 0 ->
+ <<42:S/integer>>.
+
+w4(V) when is_float(V) ->
+ <<V/utf32>>.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/bs_utf8.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/bs_utf8.erl
new file mode 100644
index 0000000000..5fe28f1da1
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/bs_utf8.erl
@@ -0,0 +1,27 @@
+%%--------------------------------------------------------------------
+%% Test case that exposed a bug (bogus warning) in dialyzer_dataflow
+%% when refining binaries containing UTF-based segments. Reported by
+%% Patrik Nyblom on 4/3/2009 and fixed by Kostis Sagonas on 31/3/2009.
+%%--------------------------------------------------------------------
+
+-module(bs_utf8).
+
+-export([doit/2]).
+
+doit(N, Bin) when is_integer(N), N > 0 ->
+ count_and_find(Bin, N).
+
+count_and_find(Bin, N) when is_binary(Bin) ->
+ cafu(Bin, N, 0, 0, no_pos).
+
+cafu(<<>>, _N, Count, _ByteCount, SavePos) ->
+ {Count, SavePos};
+cafu(<<_/utf8, Rest/binary>>, 0, Count, ByteCount, _SavePos) ->
+ cafu(Rest, -1, Count+1, 0, ByteCount);
+cafu(<<_/utf8, Rest/binary>>, N, Count, _ByteCount, SavePos) when N < 0 ->
+ cafu(Rest, -1, Count+1, 0, SavePos);
+cafu(<<_/utf8, Rest/binary>> = Whole, N, Count, ByteCount, SavePos) ->
+ Delta = byte_size(Whole) - byte_size(Rest),
+ cafu(Rest, N-1, Count+1, ByteCount+Delta, SavePos);
+cafu(_Other, _N, Count, ByteCount, _SavePos) -> % Non Unicode character at end
+ {Count, ByteCount}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/cerl_hipeify.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/cerl_hipeify.erl
new file mode 100644
index 0000000000..3ccadec4d0
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/cerl_hipeify.erl
@@ -0,0 +1,684 @@
+%% =====================================================================
+%% This library is free software; you can redistribute it and/or modify
+%% it under the terms of the GNU Lesser General Public License as
+%% published by the Free Software Foundation; either version 2 of the
+%% License, or (at your option) any later version.
+%%
+%% This library is distributed in the hope that it will be useful, but
+%% WITHOUT ANY WARRANTY; without even the implied warranty of
+%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+%% Lesser General Public License for more details.
+%%
+%% You should have received a copy of the GNU Lesser General Public
+%% License along with this library; if not, write to the Free Software
+%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
+%% USA
+%%
+%% $Id: cerl_hipeify.erl,v 1.1 2008/12/17 09:53:49 mikpe Exp $
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2000-2004 Richard Carlsson
+%% @doc HiPE-ification of Core Erlang code. Prepares Core Erlang code
+%% for translation to ICode.
+%% @see cerl_to_icode
+
+-module(cerl_hipeify).
+
+-export([transform/2]).
+
+-define(PRIMOP_IDENTITY, identity). % arity 1
+-define(PRIMOP_NOT, 'not'). % arity 1
+-define(PRIMOP_AND, 'and'). % arity 2
+-define(PRIMOP_OR, 'or'). % arity 2
+-define(PRIMOP_XOR, 'xor'). % arity 2
+-define(PRIMOP_ADD, '+'). % arity 2
+-define(PRIMOP_SUB, '-'). % arity 2
+-define(PRIMOP_NEG, neg). % arity 1
+-define(PRIMOP_MUL, '*'). % arity 2
+-define(PRIMOP_DIV, '/'). % arity 2
+-define(PRIMOP_INTDIV, 'div'). % arity 2
+-define(PRIMOP_REM, 'rem'). % arity 2
+-define(PRIMOP_BAND, 'band'). % arity 2
+-define(PRIMOP_BOR, 'bor'). % arity 2
+-define(PRIMOP_BXOR, 'bxor'). % arity 2
+-define(PRIMOP_BNOT, 'bnot'). % arity 1
+-define(PRIMOP_BSL, 'bsl'). % arity 2
+-define(PRIMOP_BSR, 'bsr'). % arity 2
+-define(PRIMOP_EQ, '=='). % arity 2
+-define(PRIMOP_NE, '/='). % arity 2
+-define(PRIMOP_EXACT_EQ, '=:='). % arity 2
+-define(PRIMOP_EXACT_NE, '=/='). % arity 2
+-define(PRIMOP_LT, '<'). % arity 2
+-define(PRIMOP_GT, '>'). % arity 2
+-define(PRIMOP_LE, '=<'). % arity 2
+-define(PRIMOP_GE, '>='). % arity 2
+-define(PRIMOP_IS_ATOM, 'is_atom'). % arity 1
+-define(PRIMOP_IS_BIGNUM, 'is_bignum'). % arity 1
+-define(PRIMOP_IS_BINARY, 'is_binary'). % arity 1
+-define(PRIMOP_IS_CONSTANT, 'is_constant'). % arity 1
+-define(PRIMOP_IS_FIXNUM, 'is_fixnum'). % arity 1
+-define(PRIMOP_IS_FLOAT, 'is_float'). % arity 1
+-define(PRIMOP_IS_FUNCTION, 'is_function'). % arity 1
+-define(PRIMOP_IS_INTEGER, 'is_integer'). % arity 1
+-define(PRIMOP_IS_LIST, 'is_list'). % arity 1
+-define(PRIMOP_IS_NUMBER, 'is_number'). % arity 1
+-define(PRIMOP_IS_PID, 'is_pid'). % arity 1
+-define(PRIMOP_IS_PORT, 'is_port'). % arity 1
+-define(PRIMOP_IS_REFERENCE, 'is_reference'). % arity 1
+-define(PRIMOP_IS_TUPLE, 'is_tuple'). % arity 1
+-define(PRIMOP_IS_RECORD, 'is_record'). % arity 3
+-define(PRIMOP_EXIT, exit). % arity 1
+-define(PRIMOP_THROW, throw). % arity 1
+-define(PRIMOP_ERROR, error). % arity 1,2
+-define(PRIMOP_RETHROW, raise). % arity 2
+-define(PRIMOP_RECEIVE_SELECT, receive_select). % arity 0
+-define(PRIMOP_RECEIVE_NEXT, receive_next). % arity 0
+-define(PRIMOP_ELEMENT, element). % arity 2
+-define(PRIMOP_DSETELEMENT, dsetelement). % arity 3
+-define(PRIMOP_MAKE_FUN, make_fun). % arity 6
+-define(PRIMOP_APPLY_FUN, apply_fun). % arity 2
+-define(PRIMOP_FUN_ELEMENT, closure_element). % arity 2
+-define(PRIMOP_SET_LABEL, set_label). % arity 1
+-define(PRIMOP_GOTO_LABEL, goto_label). % arity 1
+-define(PRIMOP_REDUCTION_TEST, reduction_test). % arity 0
+
+-record(ctxt, {class = expr}).
+
+
+%% @spec transform(Module::cerl(), Options::[term()]) -> cerl()
+%%
+%% cerl() = cerl:cerl()
+%%
+%% @doc Rewrites a Core Erlang module to a form suitable for further
+%% translation to HiPE Icode. See module <code>cerl_to_icode</code> for
+%% details.
+%%
+%% @see cerl_to_icode
+%% @see cerl_cconv
+
+transform(E, Opts) ->
+ %% Start by closure converting the code
+ module(cerl_cconv:transform(E, Opts), Opts).
+
+module(E, Opts) ->
+ {Ds, Env, Ren} = add_defs(cerl:module_defs(E), env__new(),
+ ren__new()),
+ M = cerl:module_name(E),
+ S0 = s__new(cerl:atom_val(M)),
+ S = s__set_pmatch(proplists:get_value(pmatch, Opts), S0),
+ {Ds1, _} = defs(Ds, true, Env, Ren, S),
+ cerl:update_c_module(E, M, cerl:module_exports(E),
+ cerl:module_attrs(E), Ds1).
+
+%% Note that the environment is defined on the renamed variables.
+
+expr(E0, Env, Ren, Ctxt, S0) ->
+ %% Do peephole optimizations as we traverse the code.
+ E = cerl_lib:reduce_expr(E0),
+ case cerl:type(E) of
+ literal ->
+ {E, S0};
+ var ->
+ variable(E, Env, Ren, Ctxt, S0);
+ values ->
+ {Es, S1} = expr_list(cerl:values_es(E), Env, Ren, Ctxt, S0),
+ {cerl:update_c_values(E, Es), S1};
+ cons ->
+ {E1, S1} = expr(cerl:cons_hd(E), Env, Ren, Ctxt, S0),
+ {E2, S2} = expr(cerl:cons_tl(E), Env, Ren, Ctxt, S1),
+ {cerl:update_c_cons(E, E1, E2), S2};
+ tuple ->
+ {Es, S1} = expr_list(cerl:tuple_es(E), Env, Ren, Ctxt, S0),
+ {cerl:update_c_tuple(E, Es), S1};
+ 'let' ->
+ let_expr(E, Env, Ren, Ctxt, S0);
+ seq ->
+ {A, S1} = expr(cerl:seq_arg(E), Env, Ren, Ctxt, S0),
+ {B, S2} = expr(cerl:seq_body(E), Env, Ren, Ctxt, S1),
+ {cerl:update_c_seq(E, A, B), S2};
+ apply ->
+ {Op, S1} = expr(cerl:apply_op(E), Env, Ren, Ctxt, S0),
+ {As, S2} = expr_list(cerl:apply_args(E), Env, Ren, Ctxt, S1),
+ {cerl:update_c_apply(E, Op, As), S2};
+ call ->
+ {M, S1} = expr(cerl:call_module(E), Env, Ren, Ctxt, S0),
+ {N, S2} = expr(cerl:call_name(E), Env, Ren, Ctxt, S1),
+ {As, S3} = expr_list(cerl:call_args(E), Env, Ren, Ctxt, S2),
+ {rewrite_call(E, M, N, As, S3), S3};
+ primop ->
+ {As, S1} = expr_list(cerl:primop_args(E), Env, Ren, Ctxt, S0),
+ N = cerl:primop_name(E),
+ {rewrite_primop(E, N, As, S1), S1};
+ 'case' ->
+ {A, S1} = expr(cerl:case_arg(E), Env, Ren, Ctxt, S0),
+ {E1, Vs, S2} = clauses(cerl:case_clauses(E), Env, Ren, Ctxt, S1),
+ {cerl:c_let(Vs, A, E1), S2};
+ 'fun' ->
+ Vs = cerl:fun_vars(E),
+ {Vs1, Env1, Ren1} = add_vars(Vs, Env, Ren),
+ {B, S1} = expr(cerl:fun_body(E), Env1, Ren1, Ctxt, S0),
+ {cerl:update_c_fun(E, Vs1, B), S1};
+ 'receive' ->
+ receive_expr(E, Env, Ren, Ctxt, S0);
+ 'try' ->
+ {A, S1} = expr(cerl:try_arg(E), Env, Ren, Ctxt, S0),
+ Vs = cerl:try_vars(E),
+ {Vs1, Env1, Ren1} = add_vars(Vs, Env, Ren),
+ {B, S2} = expr(cerl:try_body(E), Env1, Ren1, Ctxt, S1),
+ Evs = cerl:try_evars(E),
+ {Evs1, Env2, Ren2} = add_vars(Evs, Env, Ren),
+ {H, S3} = expr(cerl:try_handler(E), Env2, Ren2, Ctxt, S2),
+ {cerl:update_c_try(E, A, Vs1, B, Evs1, H), S3};
+ 'catch' ->
+ catch_expr(E, Env, Ren, Ctxt, S0);
+ letrec ->
+ {Ds, Env1, Ren1} = add_defs(cerl:letrec_defs(E), Env, Ren),
+ {Ds1, S1} = defs(Ds, false, Env1, Ren1, S0),
+ {B, S2} = expr(cerl:letrec_body(E), Env1, Ren1, Ctxt, S1),
+ {cerl:update_c_letrec(E, Ds1, B), S2};
+ binary ->
+ {Segs, S1}=expr_list(cerl:binary_segments(E), Env, Ren,
+ Ctxt, S0),
+ {cerl:update_c_binary(E, Segs), S1};
+ bitstr ->
+ {E1,S1} = expr(cerl:bitstr_val(E), Env, Ren, Ctxt, S0),
+ {E2,S2} = expr(cerl:bitstr_size(E), Env, Ren, Ctxt, S1),
+ E3 = cerl:bitstr_unit(E),
+ E4 = cerl:bitstr_type(E),
+ E5 = cerl:bitstr_flags(E),
+ {cerl:update_c_bitstr(E, E1, E2, E3, E4, E5), S2}
+ end.
+
+guard_expr(E, Env, Ren, Ctxt, S) ->
+ expr(E, Env, Ren, Ctxt#ctxt{class = guard}, S).
+
+expr_list(Es, Env, Ren, Ctxt, S0) ->
+ list(Es, Env, Ren, Ctxt, S0, fun expr/5).
+
+list([E | Es], Env, Ren, Ctxt, S0, F) ->
+ {E1, S1} = F(E, Env, Ren, Ctxt, S0),
+ {Es1, S2} = list(Es, Env, Ren, Ctxt, S1, F),
+ {[E1 | Es1], S2};
+list([], _, _, _, S, _) ->
+ {[], S}.
+
+pattern(E, Env, Ren) ->
+ case cerl:type(E) of
+ literal ->
+ E;
+ var ->
+ cerl:update_c_var(E, ren__map(cerl:var_name(E), Ren));
+ values ->
+ Es = pattern_list(cerl:values_es(E), Env, Ren),
+ cerl:update_c_values(E, Es);
+ cons ->
+ E1 = pattern(cerl:cons_hd(E), Env, Ren),
+ E2 = pattern(cerl:cons_tl(E), Env, Ren),
+ cerl:update_c_cons(E, E1, E2);
+ tuple ->
+ Es = pattern_list(cerl:tuple_es(E), Env, Ren),
+ cerl:update_c_tuple(E, Es);
+ alias ->
+ V = pattern(cerl:alias_var(E), Env, Ren),
+ P = pattern(cerl:alias_pat(E), Env, Ren),
+ cerl:update_c_alias(E, V, P);
+ binary ->
+ Segs=pattern_list(cerl:binary_segments(E), Env, Ren),
+ cerl:update_c_binary(E, Segs);
+ bitstr ->
+ E1 = pattern(cerl:bitstr_val(E), Env, Ren),
+ E2 = pattern(cerl:bitstr_size(E), Env, Ren),
+ E3 = cerl:bitstr_unit(E),
+ E4 = cerl:bitstr_type(E),
+ E5 = cerl:bitstr_flags(E),
+ cerl:update_c_bitstr(E, E1, E2, E3, E4, E5)
+ end.
+
+
+
+pattern_list([E | Es], Env, Ren) ->
+ [pattern(E, Env, Ren) | pattern_list(Es, Env, Ren)];
+pattern_list([], _, _) ->
+ [].
+
+%% Visit the function body of each definition. We insert an explicit
+%% reduction test at the start of each function.
+
+defs(Ds, Top, Env, Ren, S) ->
+ defs(Ds, [], Top, Env, Ren, S).
+
+defs([{V, F} | Ds], Ds1, Top, Env, Ren, S0) ->
+ S1 = case Top of
+ true -> s__enter_function(cerl:var_name(V), S0);
+ false -> S0
+ end,
+ {B, S2} = expr(cerl:fun_body(F), Env, Ren, #ctxt{}, S1),
+ B1 = cerl:c_seq(cerl:c_primop(cerl:c_atom(?PRIMOP_REDUCTION_TEST),
+ []),
+ B),
+ F1 = cerl:update_c_fun(F, cerl:fun_vars(F), B1),
+ defs(Ds, [{V, F1} | Ds1], Top, Env, Ren, S2);
+defs([], Ds, _Top, _Env, _Ren, S) ->
+ {lists:reverse(Ds), S}.
+
+clauses([C|_]=Cs, Env, Ren, Ctxt, S) ->
+ {Cs1, S1} = clause_list(Cs, Env, Ren, Ctxt, S),
+ %% Perform pattern matching compilation on the clauses.
+ {E, Vs} = case s__get_pmatch(S) of
+ true ->
+ cerl_pmatch:clauses(Cs1, Env);
+ no_duplicates ->
+ put('cerl_pmatch_duplicate_code', never),
+ cerl_pmatch:clauses(Cs1, Env);
+ duplicate_all ->
+ put('cerl_pmatch_duplicate_code', always),
+ cerl_pmatch:clauses(Cs1, Env);
+ Other when Other == false; Other == undefined ->
+ Vs0 = new_vars(cerl:clause_arity(C), Env),
+ {cerl:c_case(cerl:c_values(Vs0), Cs1), Vs0}
+ end,
+ %% We must make sure that we also visit any clause guards generated
+ %% by the pattern matching compilation. We pass an empty renaming,
+ %% so we do not rename any variables twice.
+ {E1, S2} = revisit_expr(E, Env, ren__new(), Ctxt, S1),
+ {E1, Vs, S2}.
+
+clause_list(Cs, Env, Ren, Ctxt, S) ->
+ list(Cs, Env, Ren, Ctxt, S, fun clause/5).
+
+clause(E, Env, Ren, Ctxt, S0) ->
+ Vs = cerl:clause_vars(E),
+ {_, Env1, Ren1} = add_vars(Vs, Env, Ren),
+ %% Visit patterns to rename variables.
+ Ps = pattern_list(cerl:clause_pats(E), Env1, Ren1),
+ {G, S1} = guard_expr(cerl:clause_guard(E), Env1, Ren1, Ctxt, S0),
+ {B, S2} = expr(cerl:clause_body(E), Env1, Ren1, Ctxt, S1),
+ {cerl:update_c_clause(E, Ps, G, B), S2}.
+
+%% This does what 'expr' does, but only recurses into clause guard
+%% expressions, 'case'-expressions, and the bodies of lets and letrecs.
+%% Note that revisiting should not add further renamings, and we simply
+%% ignore making any bindings at all at this level.
+
+revisit_expr(E, Env, Ren, Ctxt, S0) ->
+ %% Also enable peephole optimizations here.
+ revisit_expr_1(cerl_lib:reduce_expr(E), Env, Ren, Ctxt, S0).
+
+revisit_expr_1(E, Env, Ren, Ctxt, S0) ->
+ case cerl:type(E) of
+ 'case' ->
+ {Cs, S1} = revisit_clause_list(cerl:case_clauses(E), Env,
+ Ren, Ctxt, S0),
+ {cerl:update_c_case(E, cerl:case_arg(E), Cs), S1};
+ 'let' ->
+ {B, S1} = revisit_expr(cerl:let_body(E), Env, Ren, Ctxt, S0),
+ {cerl:update_c_let(E, cerl:let_vars(E), cerl:let_arg(E), B),
+ S1};
+ 'letrec' ->
+ {B, S1} = revisit_expr(cerl:letrec_body(E), Env, Ren, Ctxt, S0),
+ {cerl:update_c_letrec(E, cerl:letrec_defs(E), B), S1};
+ _ ->
+ {E, S0}
+ end.
+
+revisit_clause_list(Cs, Env, Ren, Ctxt, S) ->
+ list(Cs, Env, Ren, Ctxt, S, fun revisit_clause/5).
+
+revisit_clause(E, Env, Ren, Ctxt, S0) ->
+ %% Ignore the bindings.
+ {G, S1} = guard_expr(cerl:clause_guard(E), Env, Ren, Ctxt, S0),
+ {B, S2} = revisit_expr(cerl:clause_body(E), Env, Ren, Ctxt, S1),
+ {cerl:update_c_clause(E, cerl:clause_pats(E), G, B), S2}.
+
+%% We use the no-shadowing strategy, renaming variables on the fly and
+%% only when necessary to uphold the invariant.
+
+add_vars(Vs, Env, Ren) ->
+ add_vars(Vs, [], Env, Ren).
+
+add_vars([V | Vs], Vs1, Env, Ren) ->
+ Name = cerl:var_name(V),
+ {Name1, Ren1} = rename(Name, Env, Ren),
+ add_vars(Vs, [cerl:update_c_var(V, Name1) | Vs1],
+ env__bind(Name1, variable, Env), Ren1);
+add_vars([], Vs, Env, Ren) ->
+ {lists:reverse(Vs), Env, Ren}.
+
+rename(Name, Env, Ren) ->
+ case env__is_defined(Name, Env) of
+ false ->
+ {Name, Ren};
+ true ->
+ New = env__new_name(Env),
+ {New, ren__add(Name, New, Ren)}
+ end.
+
+%% Setting up the environment for a list of letrec-bound definitions.
+
+add_defs(Ds, Env, Ren) ->
+ add_defs(Ds, [], Env, Ren).
+
+add_defs([{V, F} | Ds], Ds1, Env, Ren) ->
+ Name = cerl:var_name(V),
+ {Name1, Ren1} =
+ case env__is_defined(Name, Env) of
+ false ->
+ {Name, Ren};
+ true ->
+ {N, A} = Name,
+ S = atom_to_list(N) ++ "_",
+ F = fun (Num) -> %% XXX: BUG: This should be F1
+ {list_to_atom(S ++ integer_to_list(Num)), A}
+ end,
+ New = env__new_function_name(F, Env),
+ {New, ren__add(Name, New, Ren)}
+ end,
+ add_defs(Ds, [{cerl:update_c_var(V, Name1), F} | Ds1],
+ env__bind(Name1, function, Env), Ren1);
+add_defs([], Ds, Env, Ren) ->
+ {lists:reverse(Ds), Env, Ren}.
+
+%% We change remote calls to important built-in functions into primop
+%% calls. In some cases (e.g., for the boolean operators), this is
+%% mainly to allow the cerl_to_icode module to handle them more
+%% straightforwardly. In most cases however, it is simply because they
+%% are supposed to be represented as primop calls on the Icode level.
+
+rewrite_call(E, M, F, As, S) ->
+ case cerl:is_c_atom(M) and cerl:is_c_atom(F) of
+ true ->
+ case call_to_primop(cerl:atom_val(M),
+ cerl:atom_val(F),
+ length(As))
+ of
+ {yes, N} ->
+ %% The primop might need further handling
+ N1 = cerl:c_atom(N),
+ E1 = cerl:update_c_primop(E, N1, As),
+ rewrite_primop(E1, N1, As, S);
+ no ->
+ cerl:update_c_call(E, M, F, As)
+ end;
+ false ->
+ cerl:update_c_call(E, M, F, As)
+ end.
+
+call_to_primop(erlang, 'not', 1) -> {yes, ?PRIMOP_NOT};
+call_to_primop(erlang, 'and', 2) -> {yes, ?PRIMOP_AND};
+call_to_primop(erlang, 'or', 2) -> {yes, ?PRIMOP_OR};
+call_to_primop(erlang, 'xor', 2) -> {yes, ?PRIMOP_XOR};
+call_to_primop(erlang, '+', 2) -> {yes, ?PRIMOP_ADD};
+call_to_primop(erlang, '+', 1) -> {yes, ?PRIMOP_IDENTITY};
+call_to_primop(erlang, '-', 2) -> {yes, ?PRIMOP_SUB};
+call_to_primop(erlang, '-', 1) -> {yes, ?PRIMOP_NEG};
+call_to_primop(erlang, '*', 2) -> {yes, ?PRIMOP_MUL};
+call_to_primop(erlang, '/', 2) -> {yes, ?PRIMOP_DIV};
+call_to_primop(erlang, 'div', 2) -> {yes, ?PRIMOP_INTDIV};
+call_to_primop(erlang, 'rem', 2) -> {yes, ?PRIMOP_REM};
+call_to_primop(erlang, 'band', 2) -> {yes, ?PRIMOP_BAND};
+call_to_primop(erlang, 'bor', 2) -> {yes, ?PRIMOP_BOR};
+call_to_primop(erlang, 'bxor', 2) -> {yes, ?PRIMOP_BXOR};
+call_to_primop(erlang, 'bnot', 1) -> {yes, ?PRIMOP_BNOT};
+call_to_primop(erlang, 'bsl', 2) -> {yes, ?PRIMOP_BSL};
+call_to_primop(erlang, 'bsr', 2) -> {yes, ?PRIMOP_BSR};
+call_to_primop(erlang, '==', 2) -> {yes, ?PRIMOP_EQ};
+call_to_primop(erlang, '/=', 2) -> {yes, ?PRIMOP_NE};
+call_to_primop(erlang, '=:=', 2) -> {yes, ?PRIMOP_EXACT_EQ};
+call_to_primop(erlang, '=/=', 2) -> {yes, ?PRIMOP_EXACT_NE};
+call_to_primop(erlang, '<', 2) -> {yes, ?PRIMOP_LT};
+call_to_primop(erlang, '>', 2) -> {yes, ?PRIMOP_GT};
+call_to_primop(erlang, '=<', 2) -> {yes, ?PRIMOP_LE};
+call_to_primop(erlang, '>=', 2) -> {yes, ?PRIMOP_GE};
+call_to_primop(erlang, is_atom, 1) -> {yes, ?PRIMOP_IS_ATOM};
+call_to_primop(erlang, is_binary, 1) -> {yes, ?PRIMOP_IS_BINARY};
+call_to_primop(erlang, is_constant, 1) -> {yes, ?PRIMOP_IS_CONSTANT};
+call_to_primop(erlang, is_float, 1) -> {yes, ?PRIMOP_IS_FLOAT};
+call_to_primop(erlang, is_function, 1) -> {yes, ?PRIMOP_IS_FUNCTION};
+call_to_primop(erlang, is_integer, 1) -> {yes, ?PRIMOP_IS_INTEGER};
+call_to_primop(erlang, is_list, 1) -> {yes, ?PRIMOP_IS_LIST};
+call_to_primop(erlang, is_number, 1) -> {yes, ?PRIMOP_IS_NUMBER};
+call_to_primop(erlang, is_pid, 1) -> {yes, ?PRIMOP_IS_PID};
+call_to_primop(erlang, is_port, 1) -> {yes, ?PRIMOP_IS_PORT};
+call_to_primop(erlang, is_reference, 1) -> {yes, ?PRIMOP_IS_REFERENCE};
+call_to_primop(erlang, is_tuple, 1) -> {yes, ?PRIMOP_IS_TUPLE};
+call_to_primop(erlang, internal_is_record, 3) -> {yes, ?PRIMOP_IS_RECORD};
+call_to_primop(erlang, element, 2) -> {yes, ?PRIMOP_ELEMENT};
+call_to_primop(erlang, exit, 1) -> {yes, ?PRIMOP_EXIT};
+call_to_primop(erlang, throw, 1) -> {yes, ?PRIMOP_THROW};
+call_to_primop(erlang, error, 1) -> {yes, ?PRIMOP_ERROR};
+call_to_primop(erlang, error, 2) -> {yes, ?PRIMOP_ERROR};
+call_to_primop(erlang, fault, 1) -> {yes, ?PRIMOP_ERROR};
+call_to_primop(erlang, fault, 2) -> {yes, ?PRIMOP_ERROR};
+call_to_primop(_, _, _) -> no.
+
+%% Also, some primops (introduced by Erlang to Core Erlang translation
+%% and possibly other stages) must be recognized and rewritten.
+
+rewrite_primop(E, N, As, S) ->
+ case {cerl:atom_val(N), As} of
+ {match_fail, [R]} ->
+ M = s__get_module_name(S),
+ {F, A} = s__get_function_name(S),
+ Stack = cerl:abstract([{M, F, A}]),
+ case cerl:type(R) of
+ tuple ->
+ %% Function clause failures have a special encoding
+ %% as '{function_clause, Arg1, ..., ArgN}'.
+ case cerl:tuple_es(R) of
+ [X | Xs] ->
+ case cerl:is_c_atom(X) of
+ true ->
+ case cerl:atom_val(X) of
+ function_clause ->
+ FStack = cerl:make_list(
+ [cerl:c_tuple(
+ [cerl:c_atom(M),
+ cerl:c_atom(F),
+ cerl:make_list(Xs)])]),
+ match_fail(E, X, FStack);
+ _ ->
+ match_fail(E, R, Stack)
+ end;
+ false ->
+ match_fail(E, R, Stack)
+ end;
+ _ ->
+ match_fail(E, R, Stack)
+ end;
+ _ ->
+ match_fail(E, R, Stack)
+ end;
+ _ ->
+ cerl:update_c_primop(E, N, As)
+ end.
+
+match_fail(E, R, Stack) ->
+ cerl:update_c_primop(E, cerl:c_atom(?PRIMOP_ERROR), [R, Stack]).
+
+%% Simple let-definitions (of degree 1) in guard context are always
+%% inline expanded. This is allowable, since they cannot have side
+%% effects, and it makes it easy to generate good code for boolean
+%% expressions. It could cause repeated evaluations, but typically,
+%% local definitions within guards are used exactly once.
+
+let_expr(E, Env, Ren, Ctxt, S) ->
+ if Ctxt#ctxt.class == guard ->
+ case cerl:let_vars(E) of
+ [V] ->
+ {Name, Ren1} = rename(cerl:var_name(V), Env, Ren),
+ Env1 = env__bind(Name, {expr, cerl:let_arg(E)}, Env),
+ expr(cerl:let_body(E), Env1, Ren1, Ctxt, S);
+ _ ->
+ let_expr_1(E, Env, Ren, Ctxt, S)
+ end;
+ true ->
+ let_expr_1(E, Env, Ren, Ctxt, S)
+ end.
+
+let_expr_1(E, Env, Ren, Ctxt, S0) ->
+ {A, S1} = expr(cerl:let_arg(E), Env, Ren, Ctxt, S0),
+ Vs = cerl:let_vars(E),
+ {Vs1, Env1, Ren1} = add_vars(Vs, Env, Ren),
+ {B, S2} = expr(cerl:let_body(E), Env1, Ren1, Ctxt, S1),
+ {cerl:update_c_let(E, Vs1, A, B), S2}.
+
+variable(E, Env, Ren, Ctxt, S) ->
+ V = ren__map(cerl:var_name(E), Ren),
+ if Ctxt#ctxt.class == guard ->
+ case env__lookup(V, Env) of
+ {ok, {expr, E1}} ->
+ expr(E1, Env, Ren, Ctxt, S); % inline
+ _ ->
+ %% Since we don't track all bindings when we revisit
+ %% guards, some names will not be in the environment.
+ variable_1(E, V, S)
+ end;
+ true ->
+ variable_1(E, V, S)
+ end.
+
+variable_1(E, V, S) ->
+ {cerl:update_c_var(E, V), S}.
+
+%% A catch-expression 'catch Expr' is rewritten as:
+%%
+%% try Expr
+%% of (V) -> V
+%% catch (T, V, E) ->
+%% letrec 'wrap'/1 = fun (V) -> {'EXIT', V}
+%% in case T of
+%% 'throw' when 'true' -> V
+%% 'exit' when 'true' -> 'wrap'/1(V)
+%% V when 'true' ->
+%% 'wrap'/1({V, erlang:get_stacktrace()})
+%% end
+
+catch_expr(E, Env, Ren, Ctxt, S) ->
+ T = cerl:c_var('T'),
+ V = cerl:c_var('V'),
+ X = cerl:c_var('X'),
+ W = cerl:c_var({wrap,1}),
+ G = cerl:c_call(cerl:c_atom('erlang'),cerl:c_atom('get_stacktrace'),[]),
+ Cs = [cerl:c_clause([cerl:c_atom('throw')], V),
+ cerl:c_clause([cerl:c_atom('exit')], cerl:c_apply(W, [V])),
+ cerl:c_clause([T], cerl:c_apply(W, [cerl:c_tuple([V,G])]))
+ ],
+ C = cerl:c_case(T, Cs),
+ F = cerl:c_fun([V], cerl:c_tuple([cerl:c_atom('EXIT'), V])),
+ H = cerl:c_letrec([{W,F}], C),
+ As = cerl:get_ann(E),
+ {B, S1} = expr(cerl:catch_body(E),Env, Ren, Ctxt, S),
+ {cerl:ann_c_try(As, B, [V], V, [T,V,X], H), S1}.
+
+%% Receive-expressions are rewritten as follows:
+%%
+%% receive
+%% P1 when G1 -> B1
+%% ...
+%% Pn when Gn -> Bn
+%% after T -> A end
+%% becomes:
+%% receive
+%% M when 'true' ->
+%% case M of
+%% P1 when G1 -> do primop RECEIVE_SELECT B1
+%% ...
+%% Pn when Gn -> do primop RECEIVE_SELECT Bn
+%% Pn+1 when 'true' -> primop RECEIVE_NEXT()
+%% end
+%% after T -> A end
+
+receive_expr(E, Env, Ren, Ctxt, S0) ->
+ Cs = cerl:receive_clauses(E),
+ {B, Vs, S1} = clauses(receive_clauses(Cs), Env, Ren, Ctxt, S0),
+ {T, S2} = expr(cerl:receive_timeout(E), Env, Ren, Ctxt, S1),
+ {A, S3} = expr(cerl:receive_action(E), Env, Ren, Ctxt, S2),
+ Cs1 = [cerl:c_clause(Vs, B)],
+ {cerl:update_c_receive(E, Cs1, T, A), S3}.
+
+receive_clauses([C | Cs]) ->
+ Call = cerl:c_primop(cerl:c_atom(?PRIMOP_RECEIVE_SELECT),
+ []),
+ B = cerl:c_seq(Call, cerl:clause_body(C)),
+ C1 = cerl:update_c_clause(C, cerl:clause_pats(C),
+ cerl:clause_guard(C), B),
+ [C1 | receive_clauses(Cs)];
+receive_clauses([]) ->
+ Call = cerl:c_primop(cerl:c_atom(?PRIMOP_RECEIVE_NEXT),
+ []),
+ V = cerl:c_var('X'), % any name is ok
+ [cerl:c_clause([V], Call)].
+
+
+new_vars(N, Env) ->
+ [cerl:c_var(V) || V <- env__new_names(N, Env)].
+
+
+%% ---------------------------------------------------------------------
+%% Environment
+
+env__new() ->
+ rec_env:empty().
+
+env__bind(Key, Value, Env) ->
+ rec_env:bind(Key, Value, Env).
+
+%% env__get(Key, Env) ->
+%% rec_env:get(Key, Env).
+
+env__lookup(Key, Env) ->
+ rec_env:lookup(Key, Env).
+
+env__is_defined(Key, Env) ->
+ rec_env:is_defined(Key, Env).
+
+env__new_name(Env) ->
+ rec_env:new_key(Env).
+
+env__new_names(N, Env) ->
+ rec_env:new_keys(N, Env).
+
+env__new_function_name(F, Env) ->
+ rec_env:new_key(F, Env).
+
+
+%% ---------------------------------------------------------------------
+%% Renaming
+
+ren__new() ->
+ dict:new().
+
+ren__add(Key, Value, Ren) ->
+ dict:store(Key, Value, Ren).
+
+ren__map(Key, Ren) ->
+ case dict:find(Key, Ren) of
+ {ok, Value} ->
+ Value;
+ error ->
+ Key
+ end.
+
+
+%% ---------------------------------------------------------------------
+%% State
+
+-record(state, {module, function, pmatch=true}).
+
+s__new(Module) ->
+ #state{module = Module}.
+
+s__get_module_name(S) ->
+ S#state.module.
+
+s__enter_function(F, S) ->
+ S#state{function = F}.
+
+s__get_function_name(S) ->
+ S#state.function.
+
+s__set_pmatch(V, S) ->
+ S#state{pmatch = V}.
+
+s__get_pmatch(S) ->
+ S#state.pmatch.
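
[Editor's illustration, not part of the committed file] transform/2 above takes a Core
Erlang module, so exercising it requires the compiler's Core output; it also assumes
cerl_cconv and cerl_lib are available on the code path.  A minimal sketch (the file
name is a placeholder):

    {ok, _Mod, Core} = compile:file("some_module.erl",
                                    [to_core, binary, report_errors]),
    cerl_hipeify:transform(Core, []).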
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_acceptor.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_acceptor.erl
new file mode 100644
index 0000000000..2aef625dc6
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_acceptor.erl
@@ -0,0 +1,120 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_acceptor.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description : Acceptor
+%%% This module accepts new connections and starts corresponding
+%%% comm_connection processes.
+%%%
+%%% Created : 18 Apr 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id $
+-module(comm_layer_dir.comm_acceptor).
+
+-export([start_link/1, init/2]).
+
+-import(config).
+-import(gen_tcp).
+-import(inet).
+-import(log).
+-import(lists).
+-import(process_dictionary).
+
+start_link(InstanceId) ->
+ Pid = spawn_link(comm_layer_dir.comm_acceptor, init, [InstanceId, self()]),
+ receive
+ {started} ->
+ {ok, Pid}
+ end.
+
+init(InstanceId, Supervisor) ->
+ process_dictionary:register_process(InstanceId, acceptor, self()),
+ erlang:register(comm_layer_acceptor, self()),
+ log:log(info,"[ CC ] listening on ~p:~p", [config:listenIP(), config:listenPort()]),
+ LS = case config:listenIP() of
+ undefined ->
+ open_listen_port(config:listenPort(), first_ip());
+ _ ->
+ open_listen_port(config:listenPort(), config:listenIP())
+ end,
+ {ok, {_LocalAddress, LocalPort}} = inet:sockname(LS),
+ comm_port:set_local_address(undefined, LocalPort),
+ %io:format("this() == ~w~n", [{LocalAddress, LocalPort}]),
+ Supervisor ! {started},
+ server(LS).
+
+server(LS) ->
+ case gen_tcp:accept(LS) of
+ {ok, S} ->
+ case comm_port:get_local_address_port() of
+ {undefined, LocalPort} ->
+ {ok, {MyIP, _LocalPort}} = inet:sockname(S),
+ comm_port:set_local_address(MyIP, LocalPort);
+ _ ->
+ ok
+ end,
+ receive
+ {tcp, S, Msg} ->
+ {endpoint, Address, Port} = binary_to_term(Msg),
+ % auto determine remote address, when not sent correctly
+ NewAddress = if Address =:= {0,0,0,0} orelse Address =:= {127,0,0,1} ->
+ case inet:peername(S) of
+ {ok, {PeerAddress, _Port}} ->
+ % io:format("Sent Address ~p\n",[Address]),
+ % io:format("Peername is ~p\n",[PeerAddress]),
+ PeerAddress;
+ {error, _Why} ->
+ % io:format("Peername error ~p\n",[Why]).
+ Address
+ end;
+ true ->
+ % io:format("Address is ~p\n",[Address]),
+ Address
+ end,
+ NewPid = comm_connection:new(NewAddress, Port, S),
+ gen_tcp:controlling_process(S, NewPid),
+ inet:setopts(S, [{active, once}, {send_timeout, config:read(tcp_send_timeout)}]),
+ comm_port:register_connection(NewAddress, Port, NewPid, S)
+ end,
+ server(LS);
+ Other ->
+ log:log(warn,"[ CC ] unknown message ~p", [Other])
+ end.
+
+open_listen_port({From, To}, IP) ->
+ open_listen_port(lists:seq(From, To), IP);
+open_listen_port([Port | Rest], IP) ->
+ case gen_tcp:listen(Port, [binary, {packet, 4}, {reuseaddr, true},
+ {active, once}, {ip, IP}]) of
+ {ok, Socket} ->
+ Socket;
+ {error, Reason} ->
+ log:log(error,"[ CC ] can't listen on ~p: ~p~n", [Port, Reason]),
+ open_listen_port(Rest, IP)
+ end;
+open_listen_port([], _) ->
+ abort;
+open_listen_port(Port, IP) ->
+ open_listen_port([Port], IP).
+
+-include_lib("kernel/include/inet.hrl").
+
+first_ip() ->
+ {ok, Hostname} = inet:gethostname(),
+ {ok, HostEntry} = inet:gethostbyname(Hostname),
+ erlang:hd(HostEntry#hostent.h_addr_list).
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_connection.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_connection.erl
new file mode 100644
index 0000000000..8dca647f6d
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_connection.erl
@@ -0,0 +1,206 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_connection.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description : creates and destroys connections and represents the
+%%% endpoint of a connection where messages are received and
+%%% sent from/to the network.
+%%%
+%%% Created : 18 Apr 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id $
+-module(comm_layer_dir.comm_connection).
+
+-export([send/3, open_new/4, new/3, open_new_async/4]).
+
+-import(config).
+-import(gen_tcp).
+-import(inet).
+-import(io).
+-import(io_lib).
+-import(log).
+-import(timer).
+
+-include("comm_layer.hrl").
+
+%% @doc new accepted connection. called by comm_acceptor
+%% @spec new(inet:ip_address(), int(), socket()) -> pid()
+new(Address, Port, Socket) ->
+ spawn(fun () -> loop(Socket, Address, Port) end).
+
+%% @doc open new connection
+%% @spec open_new(inet:ip_address(), int(), inet:ip_address(), int()) ->
+%% {local_ip, inet:ip_address(), int(), pid(), inet:socket()}
+%% | fail
+%% | {connection, pid(), inet:socket()}
+open_new(Address, Port, undefined, MyPort) ->
+ Myself = self(),
+ LocalPid = spawn(fun () ->
+ case new_connection(Address, Port, MyPort) of
+ fail ->
+ Myself ! {new_connection_failed};
+ Socket ->
+ {ok, {MyIP, _MyPort}} = inet:sockname(Socket),
+ Myself ! {new_connection_started, MyIP, MyPort, Socket},
+ loop(Socket, Address, Port)
+ end
+ end),
+ receive
+ {new_connection_failed} ->
+ fail;
+ {new_connection_started, MyIP, MyPort, S} ->
+ {local_ip, MyIP, MyPort, LocalPid, S}
+ end;
+open_new(Address, Port, _MyAddress, MyPort) ->
+ Owner = self(),
+ LocalPid = spawn(fun () ->
+ case new_connection(Address, Port, MyPort) of
+ fail ->
+ Owner ! {new_connection_failed};
+ Socket ->
+ Owner ! {new_connection_started, Socket},
+ loop(Socket, Address, Port)
+ end
+ end),
+ receive
+ {new_connection_failed} ->
+ fail;
+ {new_connection_started, Socket} ->
+ {connection, LocalPid, Socket}
+ end.
+
+% ===============================================================================
+% @doc open a new connection asynchronously
+% ===============================================================================
+-spec(open_new_async/4 :: (any(), any(), any(), any()) -> pid()).
+open_new_async(Address, Port, _MyAddr, MyPort) ->
+ Pid = spawn(fun () ->
+ case new_connection(Address, Port, MyPort) of
+ fail ->
+ comm_port:unregister_connection(Address, Port),
+ ok;
+ Socket ->
+ loop(Socket, Address, Port)
+ end
+ end),
+ Pid.
+
+
+send({Address, Port, Socket}, Pid, Message) ->
+ BinaryMessage = term_to_binary({deliver, Pid, Message}),
+ SendTimeout = config:read(tcp_send_timeout),
+ {Time, Result} = timer:tc(gen_tcp, send, [Socket, BinaryMessage]),
+ if
+ Time > 1200 * SendTimeout ->
+ log:log(error,"[ CC ] send to ~p took ~p: ~p",
+ [Address, Time, inet:getopts(Socket, [keep_alive, send_timeout])]);
+ true ->
+ ok
+ end,
+ case Result of
+ ok ->
+ ?LOG_MESSAGE(erlang:element(1, Message), byte_size(BinaryMessage)),
+ ok;
+ {error, closed} ->
+ comm_port:unregister_connection(Address, Port),
+ close_connection(Socket);
+ {error, _Reason} ->
+ %log:log(error,"[ CC ] couldn't send to ~p:~p (~p)", [Address, Port, Reason]),
+ comm_port:unregister_connection(Address, Port),
+ close_connection(Socket)
+ end.
+
+loop(fail, Address, Port) ->
+ comm_port:unregister_connection(Address, Port),
+ ok;
+loop(Socket, Address, Port) ->
+ receive
+ {send, Pid, Message} ->
+ case send({Address, Port, Socket}, Pid, Message) of
+ ok -> loop(Socket, Address, Port);
+ _ -> ok
+ end;
+ {tcp_closed, Socket} ->
+ comm_port:unregister_connection(Address, Port),
+ gen_tcp:close(Socket);
+ {tcp, Socket, Data} ->
+ case binary_to_term(Data) of
+ {deliver, Process, Message} ->
+ Process ! Message,
+ inet:setopts(Socket, [{active, once}]),
+ loop(Socket, Address, Port);
+ {user_close} ->
+ comm_port:unregister_connection(Address, Port),
+ gen_tcp:close(Socket);
+ {youare, _Address, _Port} ->
+ %% @TODO what do we get from this information?
+ inet:setopts(Socket, [{active, once}]),
+ loop(Socket, Address, Port);
+ Unknown ->
+ log:log(warn,"[ CC ] unknown message ~p", [Unknown]),
+ inet:setopts(Socket, [{active, once}]),
+ loop(Socket, Address, Port)
+ end;
+
+ {youare, _IP, _Port} ->
+ loop(Socket, Address, Port);
+
+ Unknown ->
+ log:log(warn,"[ CC ] unknown message2 ~p", [Unknown]) ,
+ loop(Socket, Address, Port)
+ end.
+
+% ===============================================================================
+
+-spec(new_connection(inet:ip_address(), integer(), integer()) -> inet:socket() | fail).
+new_connection(Address, Port, MyPort) ->
+ case gen_tcp:connect(Address, Port, [binary, {packet, 4}, {nodelay, true}, {active, once},
+ {send_timeout, config:read(tcp_send_timeout)}],
+ config:read(tcp_connect_timeout)) of
+ {ok, Socket} ->
+ % send end point data
+ case inet:sockname(Socket) of
+ {ok, {MyAddress, _MyPort}} ->
+ Message = term_to_binary({endpoint, MyAddress, MyPort}),
+ gen_tcp:send(Socket, Message),
+ case inet:peername(Socket) of
+ {ok, {RemoteIP, RemotePort}} ->
+ YouAre = term_to_binary({youare, RemoteIP, RemotePort}),
+ gen_tcp:send(Socket, YouAre),
+ Socket;
+ {error, _Reason} ->
+ %log:log(error,"[ CC ] reconnect to ~p because socket is ~p",
+ % [Address, Reason]),
+ close_connection(Socket),
+ new_connection(Address, Port, MyPort)
+ end;
+ {error, _Reason} ->
+ %log:log(error,"[ CC ] reconnect to ~p because socket is ~p",
+ % [Address, Reason]),
+ close_connection(Socket),
+ new_connection(Address, Port, MyPort)
+ end;
+ {error, _Reason} ->
+ %log:log(error,"[ CC ] couldn't connect to ~p:~p (~p)",
+ %[Address, Port, Reason]),
+ fail
+ end.
+
+close_connection(Socket) ->
+ spawn( fun () ->
+ gen_tcp:close(Socket)
+ end ).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.erl
new file mode 100644
index 0000000000..f48324e49c
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.erl
@@ -0,0 +1,83 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_layer.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description : Public interface to Communication Layer.
+%%% Generic functions to send messages.
+%%% Distinguishes on runtime whether the destination is in the
+%%% same Erlang virtual machine (use ! for sending) or on a remote
+%%% site (use comm_port:send()).
+%%%
+%%% Created : 04 Feb 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id $
+-module(comm_layer_dir.comm_layer).
+
+-author('[email protected]').
+-vsn('$Id: comm_layer.erl,v 1.1 2009/11/06 12:41:36 maria Exp $ ').
+
+-export([start_link/0, send/2, this/0, here/1]).
+
+-import(io).
+-import(util).
+-import(log).
+
+-include("comm_layer.hrl").
+
+
+% @TODO: should be ip
+-type(process_id() :: {any(), integer(), pid()}).
+%%====================================================================
+%% public functions
+%%====================================================================
+
+%% @doc starts the communication port (for supervisor)
+%% @spec start_link() -> {ok,Pid} | ignore | {error,Error}
+start_link() ->
+ comm_port_sup:start_link().
+
+%% @doc a process descriptor has to specify the erlang vm
+%% + the process inside. {IP address, port, pid}
+%% @type process_id() = {inet:ip_address(), int(), pid()}.
+%% @spec send(process_id(), term()) -> ok
+
+send({{_IP1, _IP2, _IP3, _IP4} = _IP, _Port, _Pid} = Target, Message) ->
+ {MyIP,MyPort} = comm_port:get_local_address_port(),
+ %io:format("send: ~p:~p -> ~p:~p(~p) : ~p\n", [MyIP, MyPort, _IP, _Port, _Pid, Message]),
+ IsLocal = (MyIP == _IP) and (MyPort == _Port),
+ if
+ IsLocal ->
+ ?LOG_MESSAGE(erlang:element(1, Message), byte_size(term_to_binary(Message))),
+ _Pid ! Message;
+ true ->
+ comm_port:send(Target, Message)
+ end;
+
+send(Target, Message) ->
+ log:log(error,"[ CC ] wrong call to cs_send:send: ~w ! ~w", [Target, Message]),
+ log:log(error,"[ CC ] stacktrace: ~w", [util:get_stacktrace()]),
+ ok.
+
+%% @doc returns process descriptor for the calling process
+-spec(this/0 :: () -> atom()).%process_id()).
+this() ->
+ here(self()).
+
+-spec(here/1 :: (pid()) -> process_id()).
+here(Pid) ->
+ {LocalIP, LocalPort} = comm_port:get_local_address_port(),
+ {LocalIP, LocalPort, Pid}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.hrl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.hrl
new file mode 100644
index 0000000000..f4e4d560f7
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_layer.hrl
@@ -0,0 +1,30 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_layer.hrl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description :
+%%%
+%%% Created : 31 Jul 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id: comm_layer.hrl,v 1.1 2009/11/06 12:41:36 maria Exp $
+-author('[email protected]').
+-vsn('$Id: comm_layer.hrl,v 1.1 2009/11/06 12:41:36 maria Exp $ ').
+
+% enable logging of message statistics
+%-define(LOG_MESSAGE(TAG, SIZE), comm_layer.comm_logger:log(TAG, SIZE)).
+-define(LOG_MESSAGE(TAG, SIZE), ok).
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_logger.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_logger.erl
new file mode 100644
index 0000000000..c70b0d3438
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_logger.erl
@@ -0,0 +1,143 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_logger.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description :
+%%%
+%%% Created : 31 Jul 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id: comm_logger.erl,v 1.1 2009/11/06 12:41:36 maria Exp $
+-module(comm_layer_dir.comm_logger).
+
+-author('[email protected]').
+-vsn('$Id: comm_logger.erl,v 1.1 2009/11/06 12:41:36 maria Exp $ ').
+
+-behaviour(gen_server).
+
+-import(gb_trees).
+-import(gen_server).
+
+%% API
+-export([start_link/0]).
+
+-export([log/2, dump/0]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-record(state, {start, map}).
+
+%%====================================================================
+%% API
+%%====================================================================
+%%--------------------------------------------------------------------
+%% Function: start_link() -> {ok,Pid} | ignore | {error,Error}
+%% Description: Starts the server
+%%--------------------------------------------------------------------
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+%%--------------------------------------------------------------------
+%% Function: log(Tag, Size) -> ok
+%% Description: logs a message type with its size
+%%--------------------------------------------------------------------
+log(Tag, Size) ->
+ gen_server:cast(?MODULE, {log, Tag, Size}).
+
+%%--------------------------------------------------------------------
+%% Function: dump() -> {gb_tree:gb_trees(), {Date, Time}}
+%% Description: gets the logging state
+%%--------------------------------------------------------------------
+dump() ->
+ gen_server:call(?MODULE, {dump}).
+
+%%====================================================================
+%% gen_server callbacks
+%%====================================================================
+
+%%--------------------------------------------------------------------
+%% Function: init(Args) -> {ok, State} |
+%% {ok, State, Timeout} |
+%% ignore |
+%% {stop, Reason}
+%% Description: Initiates the server
+%%--------------------------------------------------------------------
+init([]) ->
+ {ok, #state{start=erlang:now(), map=gb_trees:empty()}}.
+
+%%--------------------------------------------------------------------
+%% Function: %% handle_call(Request, From, State) -> {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} |
+%% {stop, Reason, State}
+%% Description: Handling call messages
+%%--------------------------------------------------------------------
+handle_call({dump}, _From, State) ->
+ Reply = {State#state.map, State#state.start},
+ {reply, Reply, State};
+handle_call(_Request, _From, State) ->
+ Reply = ok,
+ {reply, Reply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: handle_cast(Msg, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State}
+%% Description: Handling cast messages
+%%--------------------------------------------------------------------
+handle_cast({log, Tag, Size}, State) ->
+ case gb_trees:lookup(Tag, State#state.map) of
+ none ->
+ {noreply, State#state{map=gb_trees:insert(Tag, {Size, 1}, State#state.map)}};
+ {value, {OldSize, OldCount}} ->
+ {noreply, State#state{map=gb_trees:update(Tag, {Size + OldSize, OldCount + 1}, State#state.map)}}
+ end;
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: handle_info(Info, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State}
+%% Description: Handling all non call/cast messages
+%%--------------------------------------------------------------------
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: terminate(Reason, State) -> void()
+%% Description: This function is called by a gen_server when it is about to
+%% terminate. It should be the opposite of Module:init/1 and do any necessary
+%% cleaning up. When it returns, the gen_server terminates with Reason.
+%% The return value is ignored.
+%%--------------------------------------------------------------------
+terminate(_Reason, _State) ->
+ ok.
+
+%%--------------------------------------------------------------------
+%% Func: code_change(OldVsn, State, Extra) -> {ok, NewState}
+%% Description: Convert process state when code is changed
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port.erl
new file mode 100644
index 0000000000..5eded48750
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port.erl
@@ -0,0 +1,240 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_port.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description : Main CommLayer Interface
+%%% Maps remote addresses to comm_connection PIDs.
+%%%
+%%% Created : 18 Apr 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id $
+-module(comm_layer_dir.comm_port).
+
+-author('[email protected]').
+-vsn('$Id: comm_port.erl,v 1.1 2009/11/06 12:41:36 maria Exp $ ').
+
+-behaviour(gen_server).
+
+-import(ets).
+-import(gen_server).
+-import(io).
+-import(log).
+
+-define(ASYNC, true).
+%-define(SYNC, true).
+
+%% API
+-export([start_link/0,
+ send/2,
+ unregister_connection/2, register_connection/4,
+ set_local_address/2, get_local_address_port/0]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+%%====================================================================
+%% API
+%%====================================================================
+
+%% @doc
+%% @spec send({inet:ip_address(), int(), pid()}, term()) -> ok
+-ifdef(ASYNC).
+send({Address, Port, Pid}, Message) ->
+ gen_server:call(?MODULE, {send, Address, Port, Pid, Message}, 20000).
+-endif.
+-ifdef(SYNC).
+send({Address, Port, Pid}, Message) ->
+ case ets:lookup(?MODULE, {Address, Port}) of
+ [{{Address, Port}, {_LPid, Socket}}] ->
+ comm_connection:send({Address, Port, Socket}, Pid, Message),
+ ok;
+ [] ->
+ gen_server:call(?MODULE, {send, Address, Port, Pid, Message}, 20000)
+ end.
+-endif.
+
+
+%% @doc
+%% @spec unregister_connection(inet:ip_address(), int()) -> ok
+unregister_connection(Adress, Port) ->
+ gen_server:call(?MODULE, {unregister_conn, Adress, Port}, 20000).
+
+%% @doc
+%% @spec register_connection(inet:ip_address(), int(), pid(), gen_tcp:socket()) -> ok | duplicate
+register_connection(Adress, Port, Pid, Socket) ->
+ gen_server:call(?MODULE, {register_conn, Adress, Port, Pid, Socket}, 20000).
+
+%% @doc
+%% @spec set_local_address(inet:ip_address(), int()) -> ok
+set_local_address(Address, Port) ->
+ gen_server:call(?MODULE, {set_local_address, Address, Port}, 20000).
+
+
+%% @doc
+%% @spec get_local_address_port() -> {inet:ip_address(),int()}
+get_local_address_port() ->
+ case ets:lookup(?MODULE, local_address_port) of
+ [{local_address_port, Value}] ->
+ Value;
+ [] ->
+ undefined
+ end.
+
+%%--------------------------------------------------------------------
+%% Function: start_link() -> {ok,Pid} | ignore | {error,Error}
+%% Description: Starts the server
+%%--------------------------------------------------------------------
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+%%====================================================================
+%% gen_server callbacks
+%%====================================================================
+
+%%--------------------------------------------------------------------
+%% Function: init(Args) -> {ok, State} |
+%% {ok, State, Timeout} |
+%% ignore |
+%% {stop, Reason}
+%% Description: Initiates the server
+%%--------------------------------------------------------------------
+init([]) ->
+ ets:new(?MODULE, [set, protected, named_table]),
+ {ok, ok}. % empty state.
+
+%%--------------------------------------------------------------------
+%% Function: %% handle_call(Request, From, State) -> {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} |
+%% {stop, Reason, State}
+%% Description: Handling call messages
+%%--------------------------------------------------------------------
+handle_call({send, Address, Port, Pid, Message}, _From, State) ->
+ send(Address, Port, Pid, Message, State);
+
+handle_call({unregister_conn, Address, Port}, _From, State) ->
+ ets:delete(?MODULE, {Address, Port}),
+ {reply, ok, State};
+
+handle_call({register_conn, Address, Port, Pid, Socket}, _From, State) ->
+ case ets:lookup(?MODULE, {Address, Port}) of
+ [{{Address, Port}, _}] ->
+ {reply, duplicate, State};
+ [] ->
+ ets:insert(?MODULE, {{Address, Port}, {Pid, Socket}}),
+ {reply, ok, State}
+ end;
+
+handle_call({set_local_address, Address, Port}, _From, State) ->
+ ets:insert(?MODULE, {local_address_port, {Address,Port}}),
+ {reply, ok, State}.
+
+%%--------------------------------------------------------------------
+%% Function: handle_cast(Msg, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State}
+%% Description: Handling cast messages
+%%--------------------------------------------------------------------
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: handle_info(Info, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State}
+%% Description: Handling all non call/cast messages
+%%--------------------------------------------------------------------
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: terminate(Reason, State) -> void()
+%% Description: This function is called by a gen_server when it is about to
+%% terminate. It should be the opposite of Module:init/1 and do any necessary
+%% cleaning up. When it returns, the gen_server terminates with Reason.
+%% The return value is ignored.
+%%--------------------------------------------------------------------
+terminate(_Reason, _State) ->
+ ok.
+
+%%--------------------------------------------------------------------
+%% Func: code_change(OldVsn, State, Extra) -> {ok, NewState}
+%% Description: Convert process state when code is changed
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+-ifdef(ASYNC).
+send(Address, Port, Pid, Message, State) ->
+ {DepAddr,DepPort} = get_local_address_port(),
+ if
+ DepAddr == undefined ->
+ open_sync_connection(Address, Port, Pid, Message, State);
+ true ->
+ case ets:lookup(?MODULE, {Address, Port}) of
+ [{{Address, Port}, {ConnPid, _Socket}}] ->
+ ConnPid ! {send, Pid, Message},
+ {reply, ok, State};
+ [] ->
+ ConnPid = comm_connection:open_new_async(Address, Port,
+ DepAddr, DepPort),
+ ets:insert(?MODULE, {{Address, Port}, {ConnPid, undef}}),
+ ConnPid ! {send, Pid, Message},
+ {reply, ok, State}
+ end
+ end.
+-endif.
+
+-ifdef(SYNC).
+send(Address, Port, Pid, Message, State) ->
+ case ets:lookup(?MODULE, {Address, Port}) of
+ [{{Address, Port}, {_LPid, Socket}}] ->
+ comm_connection:send({Address, Port, Socket}, Pid, Message),
+ {reply, ok, State};
+ [] ->
+ open_sync_connection(Address, Port, Pid, Message, State)
+ end.
+-endif.
+
+
+open_sync_connection(Address, Port, Pid, Message, State) ->
+ {DepAddr,DepPort} = get_local_address_port(),
+ case comm_connection:open_new(Address, Port, DepAddr, DepPort) of
+ {local_ip, MyIP, MyPort, MyPid, MySocket} ->
+ comm_connection:send({Address, Port, MySocket}, Pid, Message),
+ log:log(info,"[ CC ] this() == ~w", [{MyIP, MyPort}]),
+ % set_local_address(t, {MyIP,MyPort}}),
+ % register_connection(Address, Port, MyPid, MySocket),
+ ets:insert(?MODULE, {local_address_port, {MyIP,MyPort}}),
+ ets:insert(?MODULE, {{Address, Port}, {MyPid, MySocket}}),
+ {reply, ok, State};
+ fail ->
+ % drop message (remote node not reachable, failure detector will notice)
+ {reply, ok, State};
+ {connection, LocalPid, NewSocket} ->
+ comm_connection:send({Address, Port, NewSocket}, Pid, Message),
+ ets:insert(?MODULE, {{Address, Port}, {LocalPid, NewSocket}}),
+ % register_connection(Address, Port, LPid, NewSocket),
+ {reply, ok, State}
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port_sup.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port_sup.erl
new file mode 100644
index 0000000000..622d0a8c06
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/comm_layer/comm_port_sup.erl
@@ -0,0 +1,90 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : comm_port_sup.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description :
+%%%
+%%% Created : 04 Feb 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id: comm_port_sup.erl,v 1.1 2009/11/06 12:41:36 maria Exp $
+-module(comm_layer_dir.comm_port_sup).
+
+-author('[email protected]').
+-vsn('$Id: comm_port_sup.erl,v 1.1 2009/11/06 12:41:36 maria Exp $ ').
+
+-behaviour(supervisor).
+
+-import(supervisor).
+-import(randoms).
+-import(string).
+-import(config).
+
+-export([start_link/0, init/1]).
+
+%%====================================================================
+%% API functions
+%%====================================================================
+%%--------------------------------------------------------------------
+%% Function: start_link() -> {ok,Pid} | ignore | {error,Error}
+%% Description: Starts the supervisor
+%%--------------------------------------------------------------------
+start_link() ->
+ supervisor:start_link(?MODULE, []).
+
+%%====================================================================
+%% Supervisor callbacks
+%%====================================================================
+%%--------------------------------------------------------------------
+%% Func: init(Args) -> {ok, {SupFlags, [ChildSpec]}} |
+%% ignore |
+%% {error, Reason}
+%% Description: Whenever a supervisor is started using
+%% supervisor:start_link/[2,3], this function is called by the new process
+%% to find out about restart strategy, maximum restart frequency and child
+%% specifications.
+%%--------------------------------------------------------------------
+init([]) ->
+ InstanceId = string:concat("comm_port_", randoms:getRandomId()),
+ CommPort =
+ {comm_port,
+ {comm_layer_dir.comm_port, start_link, []},
+ permanent,
+ brutal_kill,
+ worker,
+ []},
+ CommAcceptor =
+ {comm_acceptor,
+ {comm_layer_dir.comm_acceptor, start_link, [InstanceId]},
+ permanent,
+ brutal_kill,
+ worker,
+ []},
+ CommLogger =
+ {comm_logger,
+ {comm_layer_dir.comm_logger, start_link, []},
+ permanent,
+ brutal_kill,
+ worker,
+ []},
+ {ok, {{one_for_all, 10, 1},
+ [
+ CommPort,
+ CommLogger,
+ CommAcceptor
+ ]}}.
+
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/compare1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/compare1.erl
new file mode 100644
index 0000000000..2626d2ebea
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/compare1.erl
@@ -0,0 +1,21 @@
+%%%-------------------------------------------------------------------
+%%% File : compare1.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 20 Apr 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(compare1).
+
+-export([t/0]).
+
+t() ->
+ t(42).
+
+t(X) when X > 42 ->
+ error;
+t(X) when X < 42 ->
+ error;
+t(X) when X =/= 42 ->
+ error;
+t(X) -> ok.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/confusing_warning.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/confusing_warning.erl
new file mode 100644
index 0000000000..c82df0f056
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/confusing_warning.erl
@@ -0,0 +1,22 @@
+%% Test case that results in a confusing warning -- created from a
+%% very stripped down actual application. The second case clause of
+%% test/1 cannot possibly match because all a-pairs match with the
+%% first clause. Dialyzer complains that the second argument of the
+%% second 2-tuple has type 'aaa' | 'bbb'. This is mucho confusing
+%% since there is no 'a'-pair whose second element is 'aaa' | 'bbb'.
+%% Pattern matching compilation is of course what's to blame here.
+
+-module(confusing_warning).
+-export([test/1]).
+
+test(N) when is_integer(N) ->
+ case foo(N) of
+ {a, I} when is_integer(I) ->
+ I;
+ {a, {_, L}} -> % this clause cannot possibly match
+ L
+ end.
+
+foo(1) -> {a, 42};
+foo(2) -> {b, aaa}; % this is really unused
+foo(3) -> {b, bbb}. % this is really unused
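For illustration (not part of the patch), a warning like the one described above can be reproduced through Dialyzer's Erlang interface, which the new test harness also drives; a minimal sketch, assuming a default PLT has already been built and that the path points at a local copy of the file:

    Warnings = dialyzer:run([{files, ["confusing_warning.erl"]},
                             {from, src_code}]),
    lists:foreach(fun(W) -> io:put_chars(dialyzer:format_warning(W)) end,
                  Warnings).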
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/contract2.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/contract2.erl
new file mode 100644
index 0000000000..83ee5910f2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/contract2.erl
@@ -0,0 +1,18 @@
+-module(contract2).
+-export([test/2]).
+
+-spec test(list(), list()) -> ok.
+
+test([], []) ->
+ ok;
+test([], L) ->
+ raise(L);
+test([H|T], L) ->
+ case H of
+ true -> test(T, L);
+ false -> test(T, [H|L])
+ end.
+
+-spec raise(_) -> no_return().
+raise(X) ->
+ throw(X).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/contract3.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/contract3.erl
new file mode 100644
index 0000000000..c135b72d45
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/contract3.erl
@@ -0,0 +1,34 @@
+%%%-------------------------------------------------------------------
+%%% File : contract3.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Check overloaded domains
+%%%
+%%% Created : 2 Nov 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(contract3).
+
+-export([t/3]).
+
+t(X, Y, Z) ->
+ t1(X),
+ t2(X, Y),
+ t3(X, Y, Z).
+
+-spec t1(atom()|integer()) -> integer();
+ (atom()|list()) -> atom().
+
+t1(X) ->
+ foo:bar(X).
+
+-spec t2(atom(), integer()) -> integer();
+ (atom(), list()) -> atom().
+
+t2(X, Y) ->
+ foo:bar(X, Y).
+
+-spec t3(atom(), integer(), list()) -> integer();
+ (X, integer(), list()) -> X.
+
+t3(X, Y, Z) ->
+ X.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/contract5.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/contract5.erl
new file mode 100644
index 0000000000..6385473c20
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/contract5.erl
@@ -0,0 +1,15 @@
+%%%-------------------------------------------------------------------
+%%% File : contract5.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Exercise modified record types.
+%%%
+%%% Created : 15 Apr 2008 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(contract5).
+-export([t/0]).
+
+-record(bar, {baz}).
+
+-spec t() -> #bar{baz :: boolean()}.
+
+t() -> #bar{baz = not_a_boolean}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/disj_norm_form.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/disj_norm_form.erl
new file mode 100644
index 0000000000..313c2e8b86
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/disj_norm_form.erl
@@ -0,0 +1,23 @@
+%%%-------------------------------------------------------------------
+%%% File : disj_norm_form.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Exposes a bad behavior in expansion to
+%%% disjunctive normal form of guards.
+%%%
+%%% Created : 24 Aug 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(disj_norm_form).
+
+-export([t/1]).
+
+-record(foo, {bar}).
+
+t(R) ->
+ if R#foo.bar =:= 1;
+ R#foo.bar =:= 2;
+ R#foo.bar =:= 3;
+ R#foo.bar =:= 4;
+ R#foo.bar =:= 5;
+ R#foo.bar =:= 6 -> ok;
+ true -> error
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/eqeq.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/eqeq.erl
new file mode 100644
index 0000000000..6767023e3a
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/eqeq.erl
@@ -0,0 +1,16 @@
+%%%-------------------------------------------------------------------
+%%% File : eqeq.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 12 Nov 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(eqeq).
+
+-export([t/0]).
+
+t() ->
+ comp(3.14, foo).
+
+comp(X, Y) -> X =:= Y.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/ets_select.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/ets_select.erl
new file mode 100644
index 0000000000..2b3c38cd59
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/ets_select.erl
@@ -0,0 +1,12 @@
+-module(ets_select).
+-export([test/0]).
+
+test() ->
+ Table = ets:new(table, [set,{keypos,1}]),
+ ets:insert(Table, {foo, bar, baz}),
+ foo(Table). % ets:select(Table, [{{'_', '$1', '$2'}, [], ['$$']}]).
+
+foo(Table) ->
+ Tuples = ets:select(Table, [{{'_', '$1', '$2'}, [], ['$$']}]),
+ [list_to_tuple(Tuple) || Tuple <- Tuples].
+
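For illustration (not part of the patch), the match specification above can also be produced with ets:fun2ms/1 from ms_transform, which is often easier to read; a sketch of an equivalent helper, assuming it lives in a module that includes the parse transform header:

    -include_lib("stdlib/include/ms_transform.hrl").

    select_pairs(Table) ->
        MS = ets:fun2ms(fun({_, A, B}) -> [A, B] end),  %% same effect as the '$$' spec above
        ets:select(Table, MS).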
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/exhaust_case.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/exhaust_case.erl
new file mode 100644
index 0000000000..6b20c7c98c
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/exhaust_case.erl
@@ -0,0 +1,24 @@
+%%-------------------------------------------------------------------
+%% File : exhaust_case.erl
+%% Author : Kostis Sagonas <[email protected]>
+%% Description : Tests that Dialyzer warns when it finds an unreachable
+%%               case clause (independently of whether the pattern is ground or a variable).
+%%
+%% Created : 15 Dec 2004 by Kostis Sagonas <[email protected]>
+%%-------------------------------------------------------------------
+
+-module(exhaust_case).
+-export([t/1]).
+
+t(X) when is_integer(X) ->
+ case ret(X) of
+ foo -> ok;
+ bar -> ok;
+ 42 -> ok;
+ _other -> error %% unreachable clause (currently no warning)
+ %% other -> error %% but contrast this with this clause... hmm
+ end.
+
+ret(1) -> foo;
+ret(2) -> bar.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/failing_guard1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/failing_guard1.erl
new file mode 100644
index 0000000000..8fa1ce9ce0
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/failing_guard1.erl
@@ -0,0 +1,16 @@
+%%-----------------------------------------------------------------------
+%% Author: Kostis Sagonas (Wed Aug 23 14:54:25 CEST 2006)
+%%
+%% Program to test failing arithmetic comparisons with a number of the
+%% wrong type. The first case is handled properly; the second one is not.
+%% Why?
+%%-----------------------------------------------------------------------
+
+-module(failing_guard1).
+-export([n/1]).
+
+n(N) when (N / 2) =:= 2 -> multiple_of_four;
+n(N) when (N div 3) =:= 2.0 -> multiple_of_six;
+n(N) when (N rem 3) =:= 2.0 -> multiple_of_six;
+n(N) when is_number(N) -> other_number.
+
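For reference (not part of the patch): =:= never coerces between integers and floats, while '/' always returns a float and div/rem always return integers, so none of the first three guards above can succeed. A few shell expressions illustrating this:

    1> 6 div 3 =:= 2.0.   %% exact equality: the integer 2 is not the float 2.0
    false
    2> 6 div 3 ==  2.0.   %% arithmetic equality does coerce
    true
    3> 6 / 3 =:= 2.0.     %% '/' always returns a float
    true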
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/flatten.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/flatten.erl
new file mode 100644
index 0000000000..ac28fe27c9
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/flatten.erl
@@ -0,0 +1,18 @@
+%%%-------------------------------------------------------------------
+%%% File : flatten.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 4 Nov 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(flatten).
+
+-export([t/1]).
+
+t(Dir) ->
+ case file:list_dir(Dir) of
+ {ok,FileList} ->
+ FileList;
+ {error,Reason} ->
+ {error,lists:flatten("Can't open directory "++Dir++": "++Reason)}
+ end.
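For reference (not part of the patch): the Reason in the error tuple of file:list_dir/1 is a POSIX atom such as enoent, so the ++ above builds an improper list, which the lists:flatten/1 contract does not allow; a small sketch with hypothetical example values:

    Msg = "Can't open directory /tmp: " ++ enoent,   %% improper list [...|enoent]
    lists:flatten(Msg).                              %% crashes: flatten/1 expects a proper (deep) list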
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/fun_app.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/fun_app.erl
new file mode 100644
index 0000000000..605b0799d1
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/fun_app.erl
@@ -0,0 +1,42 @@
+%% This is taken from the code of distel.
+
+-module(fun_app).
+-export([html_index/2]). % , lines/3, curry/2]).
+
+html_index(file,Dir) ->
+ fold_file(curry(fun lines/3,Dir),[],filename:join([Dir,"doc","man_index.html"])).
+
+fold_file(Fun,Acc0,File) ->
+ {ok, FD} = file:open(File, [read]),
+ Acc = fold_file_lines(FD,Fun,Acc0),
+ file:close(FD),
+ Acc.
+
+fold_file_lines(FD,Fun,Acc) ->
+ case io:get_line(FD, "") of
+ eof -> Acc;
+ Line -> fold_file_lines(FD,Fun,Fun(trim_nl(Line),Acc))
+ end.
+
+trim_nl(Str) -> lists:reverse(tl(lists:reverse(Str))).
+
+lines(Line,_,Dir) ->
+ case string:tokens(Line, "<> \"") of
+ ["TD", "A", "HREF=", "../"++Href, M|_] ->
+ case filename:basename(Href, ".html") of
+ "index" -> ok;
+ M -> e_set({file,M}, filename:join([Dir,Href]))
+ end;
+ _ -> ok
+ end.
+
+e_set(Key,Val) -> ets:insert(?MODULE, {Key,Val}).
+
+curry(F, Arg) ->
+ case erlang:fun_info(F,arity) of
+ {_,1} -> fun() -> F(Arg) end;
+ {_,2} -> fun(A) -> F(A,Arg) end;
+ {_,3} -> fun(A,B) -> F(A,B,Arg) end;
+ {_,4} -> fun(A,B,C) -> F(A,B,C,Arg) end
+ end.
+
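For reference (not part of the patch), curry/2 above relies on arity introspection; erlang:fun_info/2 reports a fun's arity as an {arity, N} pair:

    1> erlang:fun_info(fun lists:map/2, arity).
    {arity,2}
    2> erlang:fun_info(fun lists:foldl/3, arity).
    {arity,3}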
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_match.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_match.erl
new file mode 100644
index 0000000000..c15226ba6e
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_match.erl
@@ -0,0 +1,21 @@
+%%%-------------------------------------------------------------------
+%%% File : fun_ref_match.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Find that newly created funs and references cannot
+%%% match on earlier bound variables.
+%%%
+%%% Created : 10 Mar 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(fun_ref_match).
+
+-export([t1/1, t2/1]).
+
+t1(X) ->
+ X = fun(Y) -> Y end,
+ ok.
+
+t2(X) ->
+ case make_ref() of
+ X -> error;
+ _ -> ok
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_record.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_record.erl
new file mode 100644
index 0000000000..eace7a4332
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/fun_ref_record.erl
@@ -0,0 +1,17 @@
+%%%-------------------------------------------------------------------
+%%% File : fun_ref_record.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Exposes a bug when referring to a fun in a record.
+%%%
+%%% Created : 25 Sep 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(fun_ref_record).
+
+-export([t1/0, t2/0]).
+
+-record(foo, {bar}).
+
+t1() ->
+ #foo{bar=fun t2/0}.
+
+t2() -> ok.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/gencall.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/gencall.erl
new file mode 100644
index 0000000000..d2875c9df1
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/gencall.erl
@@ -0,0 +1,12 @@
+%% Error: gen_server:handle_cast/2 is not reported as a call to an
+%% unexported function but as an unknown function.
+-module(gencall).
+
+-export([f/0]).
+
+f() ->
+ gen_server:call(1,2,3),
+ ets:lookup(1,2,3),
+ gencall2:foo(),
+ gencall:foo(),
+ gen_server:handle_cast(1,2).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/gs_make.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/gs_make.erl
new file mode 100644
index 0000000000..cbf3ef5dcb
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/gs_make.erl
@@ -0,0 +1,261 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: gs_make.erl,v 1.1 2008/12/17 09:53:50 mikpe Exp $
+%%
+-module(gs_make).
+
+-export([start/0]).
+
+start() ->
+ Terms = the_config(),
+ DB=fill_ets(Terms),
+ {ok,OutFd} = file:open("gstk_generic.hrl", [write]),
+ put(stdout,OutFd),
+% io:format("terms: ~p ~n ets:~p~n",[Terms,ets:tab2list(DB)]),
+ p("% Don't edit this file. It was generated by gs_make:start/0 "),
+ p("at ~p-~p-~p, ~p:~p:~p.\n\n",
+ lists:append(tuple_to_list(date()),tuple_to_list(time()))),
+ gen_out_opts(DB),
+ gen_read(DB),
+ file:close(OutFd),
+ {ok,"gstk_generic.hrl",DB}.
+
+fill_ets(Terms) ->
+ DB = ets:new(gs_mapping,[bag,public]),
+ fill_ets(DB,Terms).
+
+fill_ets(DB,[]) -> DB;
+fill_ets(DB,[{Objs,Opt,Fun,Access}|Terms]) ->
+ fill_ets(DB,lists:flatten(Objs),Opt,Fun,Access),
+ fill_ets(DB,Terms).
+
+fill_ets(_DB,[],_,_,_) -> done;
+fill_ets(DB,[Obj|Objs],Opt,Fun,rw) ->
+ ets:insert(DB,{Obj,Opt,Fun,read}),
+ ets:insert(DB,{Obj,Opt,Fun,write}),
+ fill_ets(DB,Objs,Opt,Fun,rw);
+fill_ets(DB,[Obj|Objs],Opt,Fun,r) ->
+ ets:insert(DB,{Obj,Opt,Fun,read}),
+ fill_ets(DB,Objs,Opt,Fun,r);
+fill_ets(DB,[Obj|Objs],Opt,Fun,w) ->
+ ets:insert(DB,{Obj,Opt,Fun,write}),
+ fill_ets(DB,Objs,Opt,Fun,w).
+
+
+
+gen_out_opts(DB) ->
+ ObjTypes = lists:flatten(ordsets:from_list(ets:match(DB,{'$1','_','_',write}))),
+ p("out_opts([Option|Options],Gstkid,TkW,DB,ExtraArg,S,P,C) ->\n"),
+ p(" {Opt,Val} =\n"),
+ p(" case Option of \n"),
+ p(" {{default,Cat,Key},V} -> {default,{Cat,{Key,V}}};\n"),
+ p(" {_Key,_V} -> Option;\n"),
+ p(" {default,Cat,Opti} -> {default,{Cat,Opti}};\n"),
+ p(" Atom when atom(Atom) -> {Atom,undefined};\n"),
+ p(" _ -> {error, {invalid_option,Option}}\n"),
+ p(" end,\n"),
+ p(" case Gstkid#gstkid.objtype of\n"),
+ gen_out_type_case_clauses(merge_types(ObjTypes),DB),
+ p(" Q -> exit({internal_error,unknown_objtype,Q})\n"),
+ p(" end;\n"),
+ p("out_opts([],_Gstkid,_TkW,_DB,_ExtraArg,S,P,C) -> \n"),
+ p(" {S,P,C}.\n").
+
+
+gen_out_type_case_clauses([],_DB) -> done;
+gen_out_type_case_clauses([Objtype|Objtypes],DB) ->
+ OptsFuns = lists:map(fun (L) -> list_to_tuple(L) end,
+ ets:match(DB,{Objtype,'$1','$2',write})),
+ p(" ~p -> \ncase Opt of\n",[Objtype]),
+ gen_opt_case_clauses(merge_opts(opt_prio(),OptsFuns)),
+ p(" _ -> \n"),
+ p(" handle_external_opt_call([Option|Options],Gstkid,TkW,DB,ExtraArg,"
+ " gstk_~p:option(Option,Gstkid,TkW,DB,ExtraArg),S,P,C)\n",
+ [Objtype]),
+ p(" end;\n"),
+ gen_out_type_case_clauses(Objtypes,DB).
+
+gen_opt_case_clauses([]) ->
+ done;
+gen_opt_case_clauses([{Opt,Fun}|OptFuncs]) ->
+ p(" ~p ->\n",[Opt]),
+ p(" ~p(Val,Options,Gstkid,TkW,DB,ExtraArg,S,P,C);\n",[Fun]),
+ gen_opt_case_clauses(OptFuncs).
+
+gen_read(DB) ->
+ ObjTypes = lists:flatten(ordsets:from_list(ets:match(DB,{'$1','_','_',read}))),
+ p("read_option(DB,Gstkid,TkW,Option,ExtraArg) ->\n"),
+ p(" Key = case Option of\n"),
+ p(" Atom when atom(Atom) -> Atom;\n"),
+ p(" Opt when tuple(Opt) -> element(1,Opt)\n"),
+ p(" end,\n"),
+ p(" case Gstkid#gstkid.objtype of\n"),
+ gen_read_type_clauses(merge_types(ObjTypes),DB),
+ p(" Q -> exit({internal_error,unknown_objtype,Q})\n"),
+ p(" end.\n").
+
+
+gen_read_type_clauses([],_) -> done;
+gen_read_type_clauses([Objtype|Objtypes],DB) ->
+ OptsFuns = lists:map(fun (L) -> list_to_tuple(L) end,
+ ets:match(DB,{Objtype,'$1','$2',read})),
+ p(" ~p -> \ncase Key of\n",[Objtype]),
+ gen_readopt_case_clauses(merge_opts(opt_prio(),OptsFuns)),
+ p(" _ -> \nhandle_external_read(gstk_~p:read_option(Option,Gstkid,TkW,DB,ExtraArg))\n",[Objtype]),
+ p(" end;\n"),
+ gen_read_type_clauses(Objtypes,DB).
+
+gen_readopt_case_clauses([]) ->
+ done;
+gen_readopt_case_clauses([{Opt,Fun}|OptFuncs]) ->
+ p(" ~p -> \n~p(Option,Gstkid,TkW,DB,ExtraArg);\n",[Opt,Fun]),
+ gen_readopt_case_clauses(OptFuncs).
+
+
+p(Str) ->
+ ok = io:format(get(stdout),Str,[]).
+
+p(Format,Data) ->
+ ok = io:format(get(stdout),Format,Data).
+
+%%----------------------------------------------------------------------
+%% There items should be placed early in a case statement.
+%%----------------------------------------------------------------------
+obj_prio() -> [rectangle,line,gridline,image,button,canvas,checkbutton,radiobutton].
+opt_prio() -> [x,y,width,height,move,coords,data].
+
+merge_types(Types) ->
+ T2 = ordsets:from_list(Types),
+ P2 = ordsets:from_list(obj_prio()),
+ obj_prio() ++ ordsets:subtract(T2, P2).
+
+merge_opts([],L) -> L;
+merge_opts([Opt|Opts],Dict) ->
+ case gs:assq(Opt,Dict) of
+ {value,V} -> [{Opt,V}|merge_opts(Opts,lists:keydelete(Opt,1,Dict))];
+ false -> merge_opts(Opts,Dict)
+ end.
+
+the_config() ->
+ Buttons=[button,checkbutton,radiobutton],
+ AllPureTk = [Buttons,canvas,editor,entry,frame,label,listbox,
+ menubar,menubutton,scale,window],
+ CanvasObj = [arc,image,line,oval,polygon,rectangle,text],
+ All = [AllPureTk,CanvasObj,grid,gridline,menu,menuitem,gs],
+ Containers = [canvas,frame,grid,menu,menubar,menubutton,menuitem,window],
+ Ob1 = [Buttons,canvas,grid,frame,label,entry,editor,listbox,scale],
+ Ob2 = [button,checkbutton,radiobutton,label,menubutton],
+ Ob3 = [Buttons,frame,label,entry,editor,listbox,scale,menubutton,
+ menubar,menu],
+ Ob4 = [canvas,editor,listbox],
+ [{[Buttons,entry,scale,menubutton],enable,gen_enable,rw},
+ {[Buttons,label,entry,scale,menubutton,menu],fg,gen_fg,rw},
+ {[Buttons,label,entry,scale,menubutton,menu],bg,gen_bg,rw},
+ {Ob1,anchor,gen_anchor,rw},
+ {Ob1,height,gen_height,r},
+ {Ob1--[frame],height,gen_height,w},
+ {Ob1,width,gen_width,r},
+ {Ob1--[frame],width,gen_width,w},
+ {Ob1,pack_x,gen_pack_x,rw},
+ {Ob1,pack_y,gen_pack_y,rw},
+ {Ob1,pack_xy,gen_pack_xy,w},
+ {Ob1,x,gen_x,rw},
+ {Ob1,y,gen_y,rw},
+ {Ob1,raise,gen_raise,w},
+ {Ob1,lower,gen_lower,w},
+ {Ob2,align,gen_align,rw},
+ {Ob2,font,gen_font,rw},
+ {Ob2,justify,gen_justify,rw},
+ {Ob2,padx,gen_padx,rw},
+ {Ob2,pady,gen_pady,rw},
+ {Containers,default,gen_default,w},
+ {[AllPureTk,menu],relief,gen_relief,rw},
+ {[AllPureTk,menu],bw,gen_bw,rw},
+ {[Buttons,canvas,frame,label,entry,scale,menubutton,menu,menubar],
+ setfocus,gen_setfocus,rw},
+ {Ob3,buttonpress,gen_buttonpress,rw},
+ {Ob3,buttonrelease,gen_buttonrelease,rw},
+ {Ob3,configure,gen_configure,rw},
+ {[Ob3,window],destroy,gen_destroy,rw},
+ {[Ob3,window],enter,gen_enter,rw},
+ {[Ob3,window],leave,gen_leave,rw},
+ {[Ob3,window],focus,gen_focus_ev,rw},
+ {[Ob3,window],keypress,gen_keypress,rw},
+ {[Ob3,window],keyrelease,gen_keyrelease,rw},
+ {Ob3,motion,gen_motion,rw},
+ %% events containing x,y are special
+ {[window],buttonpress,gen_buttonpress,r},
+ {[window],buttonrelease,gen_buttonrelease,r},
+ {[window],motion,gen_motion,r},
+ {All,font_wh,gen_font_wh,r},
+ {All,choose_font,gen_choose_font,r},
+ {All,data,gen_data,rw},
+ {All,children,gen_children,r},
+ {All,id,gen_id,r},
+ {All,parent,gen_parent,r},
+ {All,type,gen_type,r},
+ {All,beep,gen_beep,w},
+ {All,keep_opt,gen_keep_opt,w},
+ {All,flush,gen_flush,rw},
+ {AllPureTk,highlightbw,gen_highlightbw,rw},
+ {AllPureTk,highlightbg,gen_highlightbg,rw},
+ {AllPureTk,highlightfg,gen_highlightfg,rw},
+ {AllPureTk,cursor,gen_cursor,rw}, % bug
+ {[Buttons,label,menubutton],label,gen_label,rw},
+ {[Buttons,menubutton,menu],activebg,gen_activebg,rw},
+ {[Buttons,menubutton,menu],activefg,gen_activefg,rw},
+ {[entry],selectbg,gen_selectbg,rw},
+ {[entry],selectbw,gen_selectbw,rw},
+ {[entry],selectfg,gen_selectfg,rw},
+ {Ob4,activebg,gen_so_activebg,rw},
+ {Ob4,bc,gen_so_bc,rw},
+ {Ob4,bg,gen_so_bg,rw},
+ {Ob4,hscroll,gen_so_hscroll,r},
+ {Ob4,scrollbg,gen_so_scrollbg,rw},
+ {Ob4,scrollfg,gen_so_scrollfg,rw},
+ {Ob4,scrolls,gen_so_scrolls,w},
+ {Ob4,selectbg,gen_so_selectbg,rw},
+ {Ob4,selectbg,gen_so_selectbg,rw},
+ {Ob4,selectbw,gen_so_selectbw,rw},
+ {Ob4,selectbw,gen_so_selectbw,rw},
+ {Ob4,selectfg,gen_so_selectfg,rw},
+ {Ob4,selectfg,gen_so_selectfg,rw},
+ {Ob4,vscroll,gen_so_vscroll,r},
+ {CanvasObj,coords,gen_citem_coords,rw},
+ {CanvasObj,lower,gen_citem_lower,w},
+ {CanvasObj,raise,gen_citem_raise,w},
+ {CanvasObj,move,gen_citem_move,w},
+ {CanvasObj,setfocus,gen_citem_setfocus,rw},
+ {CanvasObj,buttonpress,gen_citem_buttonpress,w}, % should be rw
+ {CanvasObj,buttonrelease,gen_citem_buttonrelease,w},
+ {CanvasObj,enter,gen_citem_enter,w},
+ {CanvasObj,focus,gen_citem_setfocus,w},
+ {CanvasObj,keypress,gen_citem_keypress,w},
+ {CanvasObj,keyrelease,gen_citem_keyrelease,w},
+ {CanvasObj,leave,gen_citem_leave,w},
+ {CanvasObj,motion,gen_citem_motion,w},
+ {CanvasObj,buttonpress,gen_buttonpress,r},
+ {CanvasObj,buttonrelease,gen_buttonrelease,r},
+ {CanvasObj,configure,gen_configure,r},
+ {CanvasObj,destroy,gen_destroy,r},
+ {CanvasObj,enter,gen_enter,r},
+ {CanvasObj,leave,gen_leave,r},
+ {CanvasObj,focus,gen_focus_ev,r},
+ {CanvasObj,keypress,gen_keypress,r},
+ {CanvasObj,keyrelease,gen_keyrelease,r},
+ {CanvasObj,motion,gen_motion,r},
+ {[arc,oval,polygon,rectangle],fill,gen_citem_fill,rw}].
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/inf_loop2.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/inf_loop2.erl
new file mode 100644
index 0000000000..fbbec10a55
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/inf_loop2.erl
@@ -0,0 +1,23 @@
+%%---------------------------------------------------------------------
+%% Module that made the analysis go into an infinite loop when trying to assign types.
+%%
+%% What was happening is that for functions which are in an SCC but all
+%% return none(), a second chance was given to them by the analysis to
+%% see whether they return none() because they are involved in a loop
+%% (presumably server-related) and could be assigned the type unit()
+%% instead. The problem is that when they really return none() for some
+%% other reason (an error, as in this case) then we will again find
+%% none() and try again for unit(), thereby entering an infinite loop.
+%% The issue was resolved on May 17th by adding an appropriate boolean
+%% parameter to the dialyzer_typesig:solve_scc() function.
+%%---------------------------------------------------------------------
+-module(inf_loop2).
+
+-export([test/0]).
+
+test() ->
+ lists:reverse(gazonk),
+ loop().
+
+loop() ->
+ test().
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/letrec1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/letrec1.erl
new file mode 100644
index 0000000000..f5c265cc60
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/letrec1.erl
@@ -0,0 +1,13 @@
+%%%-------------------------------------------------------------------
+%%% File : letrec1.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 9 Mar 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(letrec1).
+
+-export([t/1]).
+
+t(Opts) ->
+ [Opt || Opt <- Opts, Opt =/= compressed].
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/list_match.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/list_match.erl
new file mode 100644
index 0000000000..77de6d7dee
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/list_match.erl
@@ -0,0 +1,20 @@
+%%%-------------------------------------------------------------------
+%%% File : list_match.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 12 Mar 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(list_match).
+
+-export([t/0]).
+
+t() ->
+ t([1,2,3,4]).
+
+t([]) ->
+ ok;
+t([H|T]) when is_integer(H) ->
+ t(T);
+t([_|T]) ->
+ t(T).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/lzip.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/lzip.erl
new file mode 100644
index 0000000000..753d2939d8
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/lzip.erl
@@ -0,0 +1,8 @@
+-module(lzip).
+-export([test/0, test/1]).
+
+test() ->
+ lists:zip([],[]).
+
+test(L) ->
+ lists:zip(L, []).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/make_tuple.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/make_tuple.erl
new file mode 100644
index 0000000000..0a5edf8c24
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/make_tuple.erl
@@ -0,0 +1,5 @@
+-module(make_tuple).
+-export([test/0]).
+
+test() ->
+ {_,_} = erlang:make_tuple(3, []).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/minus_minus.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/minus_minus.erl
new file mode 100644
index 0000000000..f1e9483c40
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/minus_minus.erl
@@ -0,0 +1,8 @@
+%%------------------------------------------------------------------------
+%% Test file which gave a bogus warning when analyzed with Dialyzer 1.6.1.
+%%------------------------------------------------------------------------
+-module(minus_minus).
+-export([test/0]).
+
+test() ->
+ [] -- [].
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/mod_info.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/mod_info.erl
new file mode 100644
index 0000000000..a24e4276ad
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/mod_info.erl
@@ -0,0 +1,5 @@
+-module(mod_info).
+-export([test/0]).
+
+test() ->
+ {module_info(), module_info(compile)}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/my_filter.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/my_filter.erl
new file mode 100644
index 0000000000..a67c4bd432
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/my_filter.erl
@@ -0,0 +1,17 @@
+-module(my_filter).
+-export([test/0]).
+
+test() ->
+ filter(fun mystery/1, [1,2,3,4]).
+
+filter(Pred, List) when is_function(Pred, 1) ->
+ [ E || E <- List, Pred(E) ].
+
+mystery(X) ->
+ case (X rem 3) of
+ 0 -> true;
+ 1 -> false;
+ 2 -> gazonk
+ end.
+
+%% mystery(_X,_Y) -> true.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/my_sofs.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/my_sofs.erl
new file mode 100644
index 0000000000..32252071d2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/my_sofs.erl
@@ -0,0 +1,83 @@
+%% Program showing the problems with record field accesses.
+
+-module(my_sofs).
+-export([ordset_of_sets/3, is_equal/2]).
+
+-define(TAG, 'Set').
+-define(ORDTAG, 'OrdSet').
+
+-record(?TAG, {data = [], type = type}).
+-record(?ORDTAG, {orddata = {}, ordtype = type}).
+
+-define(LIST(S), (S)#?TAG.data).
+-define(TYPE(S), (S)#?TAG.type).
+-define(SET(L, T), #?TAG{data = L, type = T}).
+-define(IS_SET(S), record(S, ?TAG)).
+
+%% Ordered sets and atoms:
+-define(ORDDATA(S), (S)#?ORDTAG.orddata).
+-define(ORDTYPE(S), (S)#?ORDTAG.ordtype).
+-define(ORDSET(L, T), #?ORDTAG{orddata = L, ordtype = T}).
+-define(IS_ORDSET(S), record(S, ?ORDTAG)).
+
+%% When IS_SET is true:
+-define(ANYTYPE, '_').
+-define(REL_TYPE(I, R), element(I, R)).
+-define(SET_OF(X), [X]).
+
+is_equal(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
+ case match_types(?TYPE(S1), ?TYPE(S2)) of
+ true -> ?LIST(S1) == ?LIST(S2);
+ false -> erlang:error(type_mismatch, [S1, S2])
+ end;
+is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_ORDSET(S2) ->
+ case match_types(?TYPE(S1), ?TYPE(S2)) of
+ true -> ?ORDDATA(S1) == ?ORDDATA(S2);
+ false -> erlang:error(type_mismatch, [S1, S2])
+ end;
+is_equal(S1, S2) when ?IS_SET(S1), ?IS_ORDSET(S2) ->
+ erlang:error(type_mismatch, [S1, S2]);
+is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_SET(S2) ->
+ erlang:error(type_mismatch, [S1, S2]).
+
+%% Type = OrderedSetType
+%% | SetType
+%% | atom() except '_'
+%% OrderedSetType = {Type, ..., Type}
+%% SetType = [ElementType] % list of exactly one element
+%% ElementType = '_' % any type (implies empty set)
+%% | Type
+
+ordset_of_sets([S | Ss], L, T) when ?IS_SET(S) ->
+ ordset_of_sets(Ss, [?LIST(S) | L], [[?TYPE(S)] | T]);
+ordset_of_sets([S | Ss], L, T) when ?IS_ORDSET(S) ->
+ ordset_of_sets(Ss, [?LIST(S) | L], [?ORDTYPE(S) | T]);
+ordset_of_sets([], L, T) ->
+ ?ORDSET(list_to_tuple(lists:reverse(L)), list_to_tuple(lists:reverse(T)));
+ordset_of_sets(_, _L, _T) ->
+ error.
+
+%% inlined.
+match_types(T, T) -> true;
+match_types(Type1, Type2) -> match_types1(Type1, Type2).
+
+match_types1(Atom, Atom) when is_atom(Atom) ->
+ true;
+match_types1(?ANYTYPE, _) ->
+ true;
+match_types1(_, ?ANYTYPE) ->
+ true;
+match_types1(?SET_OF(Type1), ?SET_OF(Type2)) ->
+ match_types1(Type1, Type2);
+match_types1(T1, T2) when tuple(T1), tuple(T2), size(T1) =:= size(T2) ->
+ match_typesl(size(T1), T1, T2);
+match_types1(_T1, _T2) ->
+ false.
+
+match_typesl(0, _T1, _T2) ->
+ true;
+match_typesl(N, T1, T2) ->
+ case match_types1(?REL_TYPE(N, T1), ?REL_TYPE(N, T2)) of
+ true -> match_typesl(N-1, T1, T2);
+ false -> false
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/no_match.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/no_match.erl
new file mode 100644
index 0000000000..e3e7a4b2d1
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/no_match.erl
@@ -0,0 +1,9 @@
+-module(no_match).
+-export([t1/1, t2/1, t3/1]).
+-record(rec, {field}).
+
+t1(#rec{} = {_}) -> no_match1.
+
+t2(42 = gazonk) -> no_match2.
+
+t3(X) when false -> X.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun.erl
new file mode 100644
index 0000000000..0bd8ba402c
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun.erl
@@ -0,0 +1,20 @@
+-module(no_unused_fun).
+-export([main/2]).
+
+main(X, Bool) ->
+ case Bool of
+ true ->
+ F = fun foo/1;
+ false ->
+ F = fun foobar/1
+ end,
+ calc(X, F).
+
+calc(X, Fun) ->
+ Fun(X).
+
+foo(A) ->
+ A+42.
+
+foobar(A) ->
+ A-42.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun2.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun2.erl
new file mode 100644
index 0000000000..e287c4de5f
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/no_unused_fun2.erl
@@ -0,0 +1,20 @@
+-module(no_unused_fun2).
+-export([main/2]).
+
+main(X, Bool) ->
+ case Bool of
+ true ->
+ F = fun foo/1;
+ false ->
+ F = fun foobar/1
+ end,
+ spawn(fun()->calc(X, F)end).
+
+calc(X, Fun) ->
+ Fun(X).
+
+foo(A) ->
+ A+42.
+
+foobar(A) ->
+ A-42.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/non_existing.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/non_existing.erl
new file mode 100644
index 0000000000..5701b8a745
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/non_existing.erl
@@ -0,0 +1,13 @@
+%%--------------------------------------------------------------------------
+%% Module which contains direct and indirect calls to remote functions
+%% which do not exist. Their treatment should be the same.
+%%--------------------------------------------------------------------------
+-module(non_existing).
+-export([t_call/0, t_fun/0]).
+
+t_call() ->
+ lists:non_existing_call(42).
+
+t_fun() ->
+ Fun = fun lists:non_existing_fun/1,
+ Fun(42).
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/not_guard_crash.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/not_guard_crash.erl
new file mode 100644
index 0000000000..0350864dce
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/not_guard_crash.erl
@@ -0,0 +1,49 @@
+%% From: Matthias Radestock <[email protected]>
+%% Date: 19 August 2007
+%%
+%% when I run dialyzer on my code it throws the following error:
+%%
+%% Analysis failed with error report:
+%% {{case_clause,any},
+%% [{dialyzer_dataflow,bind_guard,5},
+%% {dialyzer_dataflow,bind_guard_case_clauses,6},
+%% {dialyzer_dataflow,bind_guard,5},
+%% {dialyzer_dataflow,bind_guard_case_clauses,6},
+%% {dialyzer_dataflow,bind_guard,5},
+%% {dialyzer_dataflow,bind_eqeq_guard_lit_other,6},
+%% {dialyzer_dataflow,bind_guard,...},
+%% {dialyzer_dataflow,...}]}
+%%
+%% This is happening with the R11B-5 version of dialyzer when
+%% analyzing the attached file.
+%%--------------------------------------------------------------------
+
+-module(not_guard_crash).
+
+-export([match_ticket/2]).
+
+-record(ticket, {passive_flag, active_flag, write_flag, read_flag}).
+
+%%--------------------------------------------------------------------
+
+match_ticket(#ticket{passive_flag = PP,
+ active_flag = PA,
+ write_flag = PW,
+ read_flag = PR},
+ #ticket{passive_flag = TP,
+ active_flag = TA,
+ write_flag = TW,
+ read_flag = TR}) ->
+ if
+ %% Matches if either we're not requesting passive access, or
+ %% passive access is permitted, and ...
+ (not(TP) orelse PP) andalso
+ (not(TA) orelse PA) andalso
+ (not(TW) orelse PW) andalso
+ (not(TR) orelse PR) ->
+ match;
+ true ->
+ no_match
+ end.
+
+%%--------------------------------------------------------------------
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/or_bug.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/or_bug.erl
new file mode 100644
index 0000000000..fb8f6558b8
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/or_bug.erl
@@ -0,0 +1,24 @@
+%%---------------------------------------------------------------------------
+%% From: Per Hedeland <[email protected]>
+%% Date: 11 Feb 2010
+%%
+%% The code below demonstrates a bug in dialyzer - it produces the warning:
+%% Clause guard cannot succeed.
+%% The variable Cs was matched against the type any()
+%% for the first test/1 clause, but of course the claim can easily be
+%% refuted by calling test(#cs{}).
+%%---------------------------------------------------------------------------
+
+-module(or_bug).
+
+-export([test/1]).
+
+-record(cs, {children = [], actions = []}).
+
+-define(is_internal(X), ((X#cs.children =/= []) or
+ (X#cs.actions =/= []))).
+-define(has_children(X), (X#cs.children /= [])).
+
+test(Cs) when not ?is_internal(Cs) -> foo;
+test(Cs) when not ?has_children(Cs) -> bar;
+test(Cs) when Cs#cs.children =/= [] -> baz.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug.erl
new file mode 100644
index 0000000000..626f2b7f03
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug.erl
@@ -0,0 +1,17 @@
+%%%-------------------------------------------------------------------
+%%% File : orelsebug.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 14 Nov 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(orelsebug).
+
+-export([t/1, t1/1]).
+
+t(Format) when is_list(Format) ->
+ t1(Format).
+
+t1(Format) when is_list(Format) orelse is_binary(Format) ->
+ Format.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug2.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug2.erl
new file mode 100644
index 0000000000..52b1b3b5a9
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/orelsebug2.erl
@@ -0,0 +1,23 @@
+%%%-------------------------------------------------------------------
+%%% File : orelsebug2.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 21 Nov 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(orelsebug2).
+
+-export([t/1]).
+
+-record(eventdata, {
+ expires
+ }).
+
+t(L) ->
+ L2 = [E1 || E1 <- L, E1#eventdata.expires == x
+ orelse E1#eventdata.expires == y],
+
+ case L2 of
+ [_E] -> x;
+ [] -> y
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/overloaded1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/overloaded1.erl
new file mode 100644
index 0000000000..0af4f7446f
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/overloaded1.erl
@@ -0,0 +1,31 @@
+%%-----------------------------------------------------------------------------
+%% Test that exercises overloaded contracts.
+%% In December 2008 it works as far as intersection types are concerned (test1).
+%% However, it does NOT work as far as type variables are concerned (test2)
+%%-----------------------------------------------------------------------------
+-module(overloaded1).
+-export([test1/0, test2/0, foo/2]).
+
+test1() ->
+ {ok, gazonk} = foo({a,b,1}, atom_to_list(gazonk)),
+ ok.
+
+test2() ->
+ {ok, gazonk} = foo(baz, []),
+ ok.
+
+-type mod() :: atom().
+
+-spec foo(ATM, list()) -> {'ok', ATM} | {'error', _} when is_subtype(ATM, mod())
+ ; (MFA, list()) -> {'ok', MFA} | {'error', _} when is_subtype(MFA, mfa()).
+
+foo(F, _) when is_atom(F) ->
+ case atom_to_list(F) of
+ [42|_] -> {ok, F};
+ _Other -> {error, mod:bar(F)}
+ end;
+foo({M,F,A}, _) ->
+ case A =:= 0 of
+ false -> {ok, {M,F,A}};
+ true -> {error, M}
+ end.
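For illustration (not part of the patch), the is_subtype/2 constraints above are the older notation; the same contract written with the 'when Var :: Type' constraint form would read roughly as follows (an assumption about equivalent syntax, not used by the test):

    -spec foo(ATM, list()) -> {'ok', ATM} | {'error', _} when ATM :: mod();
             (MFA, list()) -> {'ok', MFA} | {'error', _} when MFA :: mfa().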
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/port_info_test.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/port_info_test.erl
new file mode 100644
index 0000000000..d8a5e15caf
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/port_info_test.erl
@@ -0,0 +1,34 @@
+%%
+%% Tests hardcoded dependent type info
+%% and the quality of the warnings that Dialyzer spits out
+%%
+-module(port_info_test).
+-export([t1/1, t2/1, t3/1, t4/1, t5/2, buggy/1]).
+
+%% The following errors are correctly caught, but the messages are a bit weird
+t1(X) when is_port(X) ->
+ {connected, 42} = erlang:port_info(X, connected);
+t1(_) -> ok.
+
+t2(X) when is_port(X) ->
+ {registered_name, "42"} = erlang:port_info(X, registered_name);
+t2(_) -> ok.
+
+%% Here only one of the two errors is reported...
+t3(X) when is_atom(X) ->
+ {output, 42} = erlang:port_info(X, connected);
+t3(_) -> ok.
+
+t4(X) when is_atom(X) ->
+ {Atom, _} = erlang:port_info(X, connected),
+ Atom = links;
+t4(_) -> ok.
+
+t5(X, Atom) when is_port(X) ->
+ {gazonk, _} = erlang:port_info(X, Atom);
+t5(_, _) -> ok.
+
+%% The type system is not strong enough to catch the following errors
+buggy(X) when is_atom(X) ->
+ {links, X} = erlang:port_info(foo, X).
+
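For reference (not part of the patch), the hardcoded return type of erlang:port_info(Port, connected) is {connected, pid()} (or undefined for a closed port), which is why matching its result against {connected, 42} in t1/1 can never succeed; a sketch of the well-typed shape, using a hypothetical helper name:

    connected_pid(Port) when is_port(Port) ->
        {connected, Pid} = erlang:port_info(Port, connected),  %% assumes the port is open
        true = is_pid(Pid),
        Pid.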
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/process_info_test.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/process_info_test.erl
new file mode 100644
index 0000000000..d098884f4d
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/process_info_test.erl
@@ -0,0 +1,21 @@
+%%
+%% Tests hardcoded dependent type info for process_info/1
+%%
+-module(process_info_test).
+-export([pinfo/1]).
+
+pinfo(P) when node(P) == node() -> % On same node
+ case process_info(P) of
+ undefined ->
+ exit(dead);
+ Info -> Info
+ end;
+pinfo(P) -> % On different node
+ case rpc:call(node(P), erlang, process_info, [P]) of
+ {badrpc, _} ->
+ exit(badrpc);
+ undefined -> % This does happen
+ exit(dead);
+ Info -> Info
+ end.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_api.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_api.erl
new file mode 100644
index 0000000000..c30233b8f5
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_api.erl
@@ -0,0 +1,99 @@
+% Copyright 2007-2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : pubsub_api.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description : Publish API function
+%%%
+%%% Created : 17 Sep 2007 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2007-2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id $
+-module(pubsub_dir.pubsub_api).
+
+-author('[email protected]').
+-vsn('$Id: pubsub_api.erl,v 1.1 2009/11/06 12:39:55 maria Exp $ ').
+
+-export([publish/2, subscribe/2, unsubscribe/2, get_subscribers/1]).
+
+-import(transstore.transaction_api).
+-import(io).
+-import(lists).
+
+%%====================================================================
+%% public functions
+%%====================================================================
+
+%% @doc publishes an event under a given topic.
+%% called e.g. from the java-interface
+%% @spec publish(string(), string()) -> ok
+publish(Topic, Content) ->
+ Subscribers = get_subscribers(Topic),
+ io:format("calling subscribers ~p~n", [Subscribers]),
+ lists:foreach(fun (Subscriber) ->
+ io:format("calling ~p~n", [Subscriber]),
+ pubsub_publish:publish(Subscriber, Topic, Content)
+ end,
+ Subscribers),
+ ok.
+
+%% @doc subscribes a URL to a topic.
+%% called e.g. from the java-interface
+%% @spec subscribe(string(), string()) -> ok | {fail, term()}
+subscribe(Topic, URL) ->
+ TFun = fun(TransLog) ->
+ {{Success, _ValueOrReason} = Result, TransLog1} = transaction_api:read(Topic, TransLog),
+ {Result2, TransLog2} = if
+ Success == fail ->
+ transaction_api:write(Topic, [URL], TransLog); %obacht: muss TransLog sein!
+ true ->
+ {value, Subscribers} = Result,
+ transaction_api:write(Topic, [URL | Subscribers], TransLog1)
+ end,
+ if
+ Result2 == ok ->
+ {{ok, ok}, TransLog2};
+ true ->
+ {Result2, TransLog2}
+ end
+ end,
+ transaction_api:do_transaction(TFun, fun (_) -> ok end, fun (X) -> {fail, X} end).
+
+%% @doc unsubscribes a URL from a topic.
+-spec(unsubscribe/2 :: (string(), string()) -> ok | {fail, any()}).
+unsubscribe(Topic, URL) ->
+ TFun = fun(TransLog) ->
+ {Subscribers, TransLog1} = transaction_api:read2(TransLog, Topic),
+ case lists:member(URL, Subscribers) of
+ true ->
+ NewSubscribers = lists:delete(URL, Subscribers),
+ TransLog2 = transaction_api:write2(TransLog1, Topic, NewSubscribers),
+ {{ok, ok}, TransLog2};
+ false ->
+ {{fail, not_found}, TransLog}
+ end
+ end,
+ transaction_api:do_transaction(TFun, fun (_) -> ok end, fun (X) -> {fail, X} end).
+
+%% @doc queries the subscribers of a topic
+%% @spec get_subscribers(string()) -> [string()]
+get_subscribers(Topic) ->
+ {Fl, _Value} = transaction_api:quorum_read(Topic),
+ if
+ Fl == fail -> %% Fl is either Fail or the Value/Subscribers
+ [];
+ true ->
+ Fl
+ end.
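For illustration (not part of the patch), the prefix-style attribute -spec(unsubscribe/2 :: ...) used above is the older notation; the equivalent contract in the now-common form is:

    -spec unsubscribe(string(), string()) -> ok | {fail, any()}.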
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_publish.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_publish.erl
new file mode 100644
index 0000000000..97c993e576
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/pubsub/pubsub_publish.erl
@@ -0,0 +1,50 @@
+% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
+% You may obtain a copy of the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS,
+% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+% See the License for the specific language governing permissions and
+% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : pubsub_publish.erl
+%%% Author : Thorsten Schuett <[email protected]>
+%%% Description : Publish function
+%%%
+%%% Created : 26 Mar 2008 by Thorsten Schuett <[email protected]>
+%%%-------------------------------------------------------------------
+%% @author Thorsten Schuett <[email protected]>
+%% @copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
+%% @version $Id $
+-module(pubsub_dir.pubsub_publish).
+
+-author('[email protected]').
+-vsn('$Id: pubsub_publish.erl,v 1.1 2009/11/06 12:39:55 maria Exp $ ').
+
+-export([publish/3, publish_internal/3]).
+
+-import(json).
+-import(io).
+-import(http).
+-import(jsonrpc).
+
+%%====================================================================
+%% public functions
+%%====================================================================
+
+%% @doc publishes an event to a given URL.
+%% @spec publish(string(), string(), string()) -> ok
+%% @todo use pool:pspawn
+publish(URL, Topic, Content) ->
+ spawn(fun () -> pubsub_publish:publish_internal(URL, Topic, Content) end),
+ ok.
+
+publish_internal(URL, Topic, Content) ->
+ Res = jsonrpc:call(URL, [], {call, notify, [Topic, Content]}),
+ io:format("~p ~p~n", [Res, URL]).
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/receive1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/receive1.erl
new file mode 100644
index 0000000000..2699a6da51
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/receive1.erl
@@ -0,0 +1,17 @@
+%%%-------------------------------------------------------------------
+%%% File : receive1.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 27 Mar 2007 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(receive1).
+
+-export([t/1]).
+
+t(X) ->
+ receive
+ after
+ infinity -> X
+ end.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/record_construct.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/record_construct.erl
new file mode 100644
index 0000000000..627e23956b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/record_construct.erl
@@ -0,0 +1,22 @@
+-module(record_construct).
+-export([t_loc/0, t_opa/0, t_rem/0]).
+
+-record(r_loc, {a = gazonk :: integer(), b = 42 :: atom()}).
+
+t_loc() ->
+ #r_loc{}.
+
+-record(r_opa, {a :: atom(),
+ b = gb_sets:new() :: gb_set(),
+ c = 42 :: boolean(),
+ d, % untyped on purpose
+ e = false :: boolean()}).
+
+t_opa() ->
+ #r_opa{}.
+
+-record(r_rem, {a = gazonk :: string()}).
+
+t_rem() ->
+ #r_rem{}.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/record_pat.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/record_pat.erl
new file mode 100644
index 0000000000..89228b8357
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/record_pat.erl
@@ -0,0 +1,19 @@
+%%%-------------------------------------------------------------------
+%%% File : record_pat.erl
+%%% Author : Tobias Lindahl <>
+%%% Description : Emit warning if a pattern violates the record type
+%%%
+%%% Created : 21 Oct 2008 by Tobias Lindahl <>
+%%%-------------------------------------------------------------------
+-module(record_pat).
+
+-export([t/1]).
+
+-record(foo, {bar :: integer()}).
+
+t(#foo{bar=baz}) -> no_way;
+t(#foo{bar=1}) -> ok.
+
+
+
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/record_send_test.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/record_send_test.erl
new file mode 100644
index 0000000000..742519e54e
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/record_send_test.erl
@@ -0,0 +1,33 @@
+%%-------------------------------------------------------------------
+%% File : record_send_test.erl
+%% Author : Kostis Sagonas <[email protected]>
+%% Description : A test inspired by a post of Mickael Remond to the
+%%               Erlang mailing list suggesting that Dialyzer should
+%% be reporting sends to records rather than to pids.
+%%               Dialyzer v1.3.0 indeed reports one of the discrepancies
+%% (the one with the 4-tuple) but not the one where the
+%% message is sent to a pair which is a record.
+%% This should be fixed.
+%%
+%% Created : 10 Apr 2005 by Kostis Sagonas <[email protected]>
+%%-------------------------------------------------------------------
+-module(record_send_test).
+
+-export([t/0]).
+
+-record(rec1, {a=a, b=b, c=c}).
+-record(rec2, {a}).
+
+t() ->
+ t(#rec1{}).
+
+t(Rec1 = #rec1{b=B}) ->
+ Rec2 = some_mod:some_function(),
+ if
+ is_record(Rec2, rec2) ->
+ Rec2 ! hello; %% currently this one is not found
+ true ->
+ Rec1 ! hello_again
+ end,
+ B.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/record_test.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/record_test.erl
new file mode 100644
index 0000000000..8151e595a0
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/record_test.erl
@@ -0,0 +1,24 @@
+%%%-------------------------------------------------------------------
+%%% File : record_test.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 22 Oct 2004 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(record_test).
+
+-export([t/0]).
+
+-record(foo, {bar}).
+
+t() ->
+ doit(foo).
+
+doit(X) ->
+ case X of
+ #foo{} -> error1;
+ foo -> ok;
+ _ -> error2
+ end.
+
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types1.erl
new file mode 100644
index 0000000000..657d11653b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types1.erl
@@ -0,0 +1,10 @@
+-module(recursive_types1).
+
+-export([test/0]).
+
+-type tree() :: 'nil' | {non_neg_integer(), tree(), tree()}.
+
+-spec test() -> {42, tree(), tree()}.
+
+test() ->
+ {42, {42, nil, nil}, {42, {42, nil, nil}, {42, nil, nil}}}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types2.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types2.erl
new file mode 100644
index 0000000000..3a22bbf5d2
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types2.erl
@@ -0,0 +1,12 @@
+-module(recursive_types2).
+
+-export([test/0]).
+
+-type tree() :: 'nil' | {non_neg_integer(), child(), child()}.
+
+-type child() :: tree().
+
+-spec test() -> {42, tree(), tree()}.
+
+test() ->
+ {42, {42, nil, nil}, {42, {42, nil, nil}, {42, nil, nil}}}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types3.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types3.erl
new file mode 100644
index 0000000000..997678ac92
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types3.erl
@@ -0,0 +1,15 @@
+-module(recursive_types3).
+
+-export([test/1]).
+
+-record(tree, {node :: atom(),
+ kid = nil :: 'nil' | tree()}).
+
+-type tree() :: #tree{}.
+
+-spec test(tree()) -> tree().
+
+test(Tree) ->
+ case Tree of
+ #tree{node = root, kid=#tree{}} -> Tree
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types4.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types4.erl
new file mode 100644
index 0000000000..118bab57a1
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types4.erl
@@ -0,0 +1,13 @@
+-module(recursive_types4).
+
+-export([test/0]).
+
+-record(tree, {node :: atom(),
+ kid = nil :: 'nil' | tree()}).
+
+-type tree() :: #tree{}.
+
+-spec test() -> tree().
+
+test() ->
+ #tree{node = root, kid = #tree{}}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types5.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types5.erl
new file mode 100644
index 0000000000..a71e613cf0
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types5.erl
@@ -0,0 +1,13 @@
+-module(recursive_types5).
+
+-export([test/0]).
+
+-type tree() :: 'nil' | {non_neg_integer(), tree(), tree()}.
+
+-record(tree, {node :: atom(),
+ kid = 'nil' :: tree()}).
+
+-spec test() -> #tree{}.
+
+test() ->
+ #tree{node = root, kid = {42, {42, nil, nil}, {42, nil, nil}}}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types6.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types6.erl
new file mode 100644
index 0000000000..ff61976736
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types6.erl
@@ -0,0 +1,17 @@
+-module(recursive_types6).
+
+-export([test/0]).
+
+-record(tree, {node :: non_neg_integer(),
+ kid = nil :: child()}).
+
+-type tree() :: #tree{}.
+
+-record(child, {tree :: 'nil' | tree()}).
+
+-type child() :: #child{}.
+
+-spec test() -> tree().
+
+test() ->
+ #tree{node = 42, kid = #child{tree = #tree{node = 42, kid = #child{tree = nil}}}}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types7.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types7.erl
new file mode 100644
index 0000000000..92106e9694
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/recursive_types7.erl
@@ -0,0 +1,13 @@
+-module(recursive_types7).
+
+-export([test/0]).
+
+-type tree() :: 'nil' | {non_neg_integer(), recursive_types7:tree(),
+ recursive_types7:tree()}.
+
+-export_type([tree/0]).
+
+-spec test() -> {42, tree(), tree()}.
+
+test() ->
+ {42, {42, nil, nil}, {42, {42, nil, nil}, {42, nil, nil}}}.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/refine_bug1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/refine_bug1.erl
new file mode 100644
index 0000000000..1b299e782a
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/refine_bug1.erl
@@ -0,0 +1,11 @@
+-module(refine_bug1).
+-export([f/1]).
+
+f(gazonk = X) ->
+ foo(X), % this call is currently not considered when refining foo's
+ throw(error); % type since it appears in a clause that throws an exception
+f(foo = X) ->
+ foo(X).
+
+foo(X) ->
+ X.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/toth.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/toth.erl
new file mode 100644
index 0000000000..bd7fa4982e
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/toth.erl
@@ -0,0 +1,99 @@
+-module(toth).
+-export([sys_table_view/1]).
+
+%%% Constants
+-define(sysTabETS,1).
+-define(sysTabMnesia,2).
+-define(sysTabBoth,3).
+
+sys_table_view([CpId,{match,Pattern},TableType, ViewType]) ->
+ AllTableList =
+ case TableType of
+ ?sysTabMnesia ->
+ lists:sort(mnesia:system_info(tables));
+ ?sysTabBoth ->
+ lists:sort(rpc:call(CpId,ets,all,[]));
+ ?sysTabETS ->
+ lists:sort(rpc:call(CpId,ets,all,[]) --
+ mnesia:system_info(tables));
+ _ -> %%% Happens at registration only
+ [ok]
+ end,
+ %% Filter the matching table names, skip unnamed tables first:
+ NamedTableList = lists:filter(fun (X) -> is_atom(X) end, AllTableList),
+ TablesShown =
+ case Pattern of
+ "" ->
+ NamedTableList;
+ _ ->
+ %% Filter the ones whose name begins with the Pattern:
+ Filter = fun(T) ->
+ lists:prefix(Pattern, atom_to_list(T))
+ end,
+ lists:filter(Filter, NamedTableList)
+ end,
+
+ Fields = [{text, [{value,"CpId: " ++ atom_to_list(CpId)}]},
+ {text, [{value,"TabSpec=" ++ Pattern},
+ {value_format, term}]},
+ {text, [{value,"Table type: " ++ formatTableType(TableType)},
+ {value_format, term}]}],
+
+ Template = [[{type, index},
+ {link, {?MODULE, sys_table_browse,
+ [{"CpId",CpId},{"TableType",TableType},
+ {"View", ViewType},
+ {"FirstKey",1}, {"KeyPattern",""}]}}],
+
+ [{type, data},
+ {title, "Table name"},
+ {display_value, {erlang, atom_to_list}}], %%% else crash
+
+ [{type,data},
+ {title, "No of rows"},
+ {display_value, term}],
+
+ [{type,data},
+ {title, "Memory"},
+ {display_value, term}]
+ ],
+
+ TableAttr = [{rows, [[T,T|tableSize(T,TableType,CpId)] ||
+ T <- TablesShown]},
+ {template,Template}],
+
+ Page = [{header, {"Filter tables", "Selected tables"}},
+ {buttons, [reload, back]},
+ {layout, [{form, Fields},
+ {table, TableAttr}]}
+ ],
+ Page.
+
+%%--------------------------------------------------------------------
+%% tableSize/3
+%% @spec tableSize(T::atom(),TableType::integer(),CpId::atom()) ->
+%% list(integer())
+%% @doc Return the table size and memory size of the table.
+%% @end
+%%---------------------------------------------------------------------
+
+tableSize(T, TableType, CpId) ->
+ case TableType of
+ ?sysTabETS ->
+ [rpc:call(CpId, ets, info, [T, size]),
+ rpc:call(CpId, ets, info, [T, memory])];
+ ?sysTabMnesia ->
+ [mnesia:table_info(T, size),mnesia:table_info(T, memory)];
+ _ -> %%% Registration
+ [0,0]
+ end.
+
+formatTableType(T) ->
+ case T of
+ ?sysTabETS ->
+ "ETS";
+ ?sysTabMnesia ->
+ "mnesia";
+ _ -> %%% Registration !
+ "ETS + mnesia"
+ end.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/trec.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/trec.erl
new file mode 100644
index 0000000000..b36b0cafba
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/trec.erl
@@ -0,0 +1,37 @@
+%%
+%% The current treatment of typed records leaves much to be desired.
+%% These are not made-up examples; I have cases like these in the
+%% branch of the HiPE compiler with types in records. I get very
+%% confusing warnings that require a lot of effort to trace back to
+%% their cause and to understand why a function has no local return.
+%%
+-module(trec).
+-export([test/0, mk_foo_exp/2]).
+
+-record(foo, {a :: integer(), b :: [atom()]}).
+
+%%
+%% For these functions we currently get the following warnings:
+%% 1. Function test/0 has no local return
+%% 2. The call trec:mk_foo_loc(42,any()) will fail since it differs
+%% in argument position 1 from the success typing arguments:
+%% ('undefined',atom())
+%% 3. Function mk_foo_loc/2 has no local return
+%%
+%% Arguably, the second warning is not what most users had in mind
+%% when they wrote the type declarations in the 'foo' record, so no
+%% doubt they'll find it confusing. But note that it is also inconsistent!
+%% How come there is a success typing for a function that has no local return?
+%%
+test() ->
+ mk_foo_loc(42, bar:f()).
+
+mk_foo_loc(A, B) ->
+ #foo{a = A, b = [A,B]}.
+
+%%
+%% For this function we currently get "has no local return" but we get
+%% no reason; I want us to get a reason.
+%%
+mk_foo_exp(A, B) when is_integer(A) ->
+ #foo{a = A, b = [A,B]}.
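+
+%% A plausible reading of warning 2 above: A is put both into field 'a'
+%% (declared integer(), implicitly also 'undefined') and into the
+%% [atom()] list in field 'b', so the only value acceptable in both
+%% positions is 'undefined'; hence the success typing ('undefined', atom()).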
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/try1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/try1.erl
new file mode 100644
index 0000000000..d07380295b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/try1.erl
@@ -0,0 +1,27 @@
+%%%-------------------------------------------------------------------
+%%% File : try1.erl
+%%% Author : <[email protected]>
+%%% Description :
+%%%
+%%% Created : 23 Aug 2005 by <[email protected]>
+%%%-------------------------------------------------------------------
+-module(try1).
+
+-export([t/1]).
+
+t(X) ->
+ case wierd_is_bool(X) of
+ true -> ok;
+ false -> ok
+ end.
+
+wierd_is_bool(X) ->
+ try bool(X) of
+ Y -> Y
+ catch
+ _:_ -> false
+ end.
+
+bool(true) -> true;
+bool(false) -> true.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/tuple1.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/tuple1.erl
new file mode 100644
index 0000000000..c58aac9646
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/tuple1.erl
@@ -0,0 +1,29 @@
+%%%-------------------------------------------------------------------
+%%% File : tuple1.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description : Exposed two bugs in the analysis;
+%%% one suppressed warning and one crash.
+%%%
+%%% Created : 13 Nov 2006 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+-module(tuple1).
+
+-export([t1/2, t2/2, t3/2, bar/2]).
+
+t1(List = [_|_], X) ->
+ lists:mapfoldl(fun foo/2, X, List).
+
+t2(List = [_|_], X) ->
+ lists:mapfoldl(fun bar/2, X, List).
+
+t3(List = [_|_], X) ->
+ lists:mapfoldl(fun baz/1, X, List).
+
+
+foo(1, 1) -> a;
+foo(a, 1) -> b.
+
+bar(1, 1) -> {b, b};
+bar(a, 1) -> {a, a}.
+
+baz(1) -> 1.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/unsafe_beamcode_bug.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/unsafe_beamcode_bug.erl
new file mode 100644
index 0000000000..889f94014e
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/unsafe_beamcode_bug.erl
@@ -0,0 +1,15 @@
+-module(unsafe_beamcode_bug).
+-export([test/1]).
+
+test(N) -> i(r(N)).
+
+%% this function must not be exported, or the error does not occur
+i({one}) -> ok1;
+i({two, _}) -> ok2;
+i({three, {_,R}, _}) -> R.
+
+r(1) -> {one};
+r(2) -> {two, 2};
+r(42)-> {dummy, 42}; % without this clause, no problem ... hmm
+r(3) -> {three, {rec,ok3}, 2}.
+
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/unused_cases.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/unused_cases.erl
new file mode 100644
index 0000000000..e6e6693963
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/unused_cases.erl
@@ -0,0 +1,41 @@
+%%-------------------------------------------------------------------
+%% File : unused_cases.erl
+%% Author : Kostis Sagonas <[email protected]>
+%% Description : Tests that Dialyzer warns whenever it finds unused
+%% case clauses -- even those that are catch-all.
+%%
+%% Created : 21 Jan 2007 by Kostis Sagonas <[email protected]>
+%%-------------------------------------------------------------------
+
+-module(unused_cases).
+-export([test/0]).
+
+test() -> % dummy function to avoid exporting stuff
+ ok = unreachable_catchall(42),
+ ok = unreachable_middle(42),
+ ok = unreachable_final(42).
+
+unreachable_catchall(X) ->
+ case mk_pair(X) of
+ {_,_} -> ok;
+ OTHER -> {unreachable_catchall, OTHER}
+ end.
+
+unreachable_middle(X) ->
+ case is_positive(X) of
+ true -> ok;
+ weird -> {unreachable_middle, weird};
+ false -> ok
+ end.
+
+unreachable_final(X) ->
+ case is_positive(X) of
+ true -> ok;
+ false -> ok;
+ OTHER-> {unreachable_final, OTHER}
+ end.
+
+mk_pair(X) -> {X, X}.
+
+is_positive(X) when is_integer(X), X > 0 -> true;
+is_positive(X) when is_integer(X) -> false.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/unused_clauses.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/unused_clauses.erl
new file mode 100644
index 0000000000..a98b227a6b
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/unused_clauses.erl
@@ -0,0 +1,18 @@
+%%-------------------------------------------------------------------
+%% File : unused_clauses.erl
+%% Author : Kostis Sagonas <[email protected]>
+%% Description : Tests that Dialyzer warns when it finds an unused
+%% clause.
+%%
+%% Created : 16 Mar 2006 by Kostis Sagonas <[email protected]>
+%%-------------------------------------------------------------------
+
+-module(unused_clauses).
+-export([test/0]).
+
+test() -> {t(atom), t({42})}.
+
+t(X) when is_atom(X) -> X;
+t(X) when is_integer(X) -> X;
+t(X) when is_tuple(X) -> element(1, X);
+t(X) when is_binary(X) -> X.
diff --git a/lib/dialyzer/test/small_tests_SUITE_data/src/zero_tuple.erl b/lib/dialyzer/test/small_tests_SUITE_data/src/zero_tuple.erl
new file mode 100644
index 0000000000..90dc366fe7
--- /dev/null
+++ b/lib/dialyzer/test/small_tests_SUITE_data/src/zero_tuple.erl
@@ -0,0 +1,13 @@
+-module(zero_tuple).
+-export([t1/0, t2/0]).
+
+t1() ->
+ {} = a(),
+ ok.
+
+t2() ->
+ b = a(),
+ ok.
+
+a() -> a.
+
diff --git a/lib/dialyzer/test/user_tests_SUITE.erl b/lib/dialyzer/test/user_tests_SUITE.erl
new file mode 100644
index 0000000000..9654114725
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE.erl
@@ -0,0 +1,78 @@
+%% ATTENTION!
+%% This is an automatically generated file. Do not edit.
+%% Use './remake' script to refresh it if needed.
+%% All Dialyzer options should be defined in dialyzer_options
+%% file.
+
+-module(user_tests_SUITE).
+
+-include("ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, init_per_suite/0, init_per_suite/1,
+ end_per_suite/1, all/0]).
+-export([user_tests_SUITE_consistency/1, broken_dialyzer/1,
+ gcpFlowControl/1, qlc_error/1, spvcOrig/1, wsp_pdu/1]).
+
+suite() ->
+ [{timetrap, {minutes, 3}}].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, []}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+all() ->
+ [user_tests_SUITE_consistency,broken_dialyzer,gcpFlowControl,qlc_error,
+ spvcOrig,wsp_pdu].
+
+dialyze(Config, TestCase) ->
+ Opts = ?config(dialyzer_options, Config),
+ Dir = ?config(data_dir, Config),
+ OutDir = ?config(priv_dir, Config),
+ dialyzer_common:check(TestCase, Opts, Dir, OutDir).
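+
+%% dialyzer_common:check/4 presumably diffs the warnings produced for
+%% TestCase against the stored file under the suite's results/ directory
+%% and returns 'same' on a match, which is what every case below expects.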
+
+user_tests_SUITE_consistency(Config) ->
+ Dir = ?config(data_dir, Config),
+ case dialyzer_common:new_tests(Dir, all()) of
+ [] -> ok;
+ New -> ct:fail({missing_tests,New})
+ end.
+
+broken_dialyzer(Config) ->
+ case dialyze(Config, broken_dialyzer) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+gcpFlowControl(Config) ->
+ case dialyze(Config, gcpFlowControl) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+qlc_error(Config) ->
+ case dialyze(Config, qlc_error) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+spvcOrig(Config) ->
+ case dialyze(Config, spvcOrig) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
+wsp_pdu(Config) ->
+ case dialyze(Config, wsp_pdu) of
+ 'same' -> 'same';
+ Error -> ct:fail(Error)
+ end.
+
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/dialyzer_options b/lib/dialyzer/test/user_tests_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..513ed7752b
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/dialyzer_options
@@ -0,0 +1,2 @@
+{dialyzer_options, []}.
+{time_limit, 3}. \ No newline at end of file
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/results/broken_dialyzer b/lib/dialyzer/test/user_tests_SUITE_data/results/broken_dialyzer
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/results/broken_dialyzer
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/results/gcpFlowControl b/lib/dialyzer/test/user_tests_SUITE_data/results/gcpFlowControl
new file mode 100644
index 0000000000..7938c53fc6
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/results/gcpFlowControl
@@ -0,0 +1,2 @@
+
+gcpFlowControl.erl:171: The pattern <Key, 'errors', X> can never match the type <_,'available' | 'bucket' | 'rejectable' | 'rejects' | 'window',0 | 1 | 20>
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/results/qlc_error b/lib/dialyzer/test/user_tests_SUITE_data/results/qlc_error
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/results/qlc_error
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/results/spvcOrig b/lib/dialyzer/test/user_tests_SUITE_data/results/spvcOrig
new file mode 100644
index 0000000000..8c57358af0
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/results/spvcOrig
@@ -0,0 +1,193 @@
+
+spvcOrig.erl:1238: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVcc',25) can never succeed
+spvcOrig.erl:1241: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVpc',20) can never succeed
+spvcOrig.erl:1244: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVpcPerm',12) can never succeed
+spvcOrig.erl:1247: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVccPerm',17) can never succeed
+spvcOrig.erl:1250: Guard test is_record(Spvc::[] | #spvcObj{},'spvcTargetVc',6) can never succeed
+spvcOrig.erl:1253: Guard test is_record(Spvc::[] | #spvcObj{},'spvcTargetVp',6) can never succeed
+spvcOrig.erl:1256: Guard test is_record(Spvc::[] | #spvcObj{},'pchVc',32) can never succeed
+spvcOrig.erl:1259: Guard test is_record(Spvc::[] | #spvcObj{},'pchVp',33) can never succeed
+spvcOrig.erl:1262: Guard test is_record(Spvc::[] | #spvcObj{},'spvcFr',21) can never succeed
+spvcOrig.erl:1265: Guard test is_record(Spvc::[] | #spvcObj{},'spvcFrPerm',6) can never succeed
+spvcOrig.erl:1268: The pattern {If_Value, _, _, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:1270: The pattern {If_Value, _, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:1272: The pattern {If_Value, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:1274: The pattern [If_Value | _] can never match the type [] | #spvcObj{}
+spvcOrig.erl:1380: The variable _ can never match since previous clauses completely covered the type any()
+spvcOrig.erl:1389: The variable _ can never match since previous clauses completely covered the type any()
+spvcOrig.erl:1576: Guard test is_record(Row::[any(),...],'spvcObj',24) can never succeed
+spvcOrig.erl:1583: Guard test is_record(Row::[any(),...],'spvcVcc',25) can never succeed
+spvcOrig.erl:1586: Guard test is_record(Row::[any(),...],'spvcVpc',20) can never succeed
+spvcOrig.erl:1589: Guard test is_record(Row::[any(),...],'spvcVpcPerm',12) can never succeed
+spvcOrig.erl:1592: Guard test is_record(Row::[any(),...],'spvcVccPerm',17) can never succeed
+spvcOrig.erl:1595: Guard test is_record(Row::[any(),...],'spvcTargetVc',6) can never succeed
+spvcOrig.erl:1598: Guard test is_record(Row::[any(),...],'spvcTargetVp',6) can never succeed
+spvcOrig.erl:1601: Guard test is_record(Row::[any(),...],'pchVc',32) can never succeed
+spvcOrig.erl:1604: Guard test is_record(Row::[any(),...],'pchVp',33) can never succeed
+spvcOrig.erl:1607: Guard test is_record(Row::[any(),...],'spvcFr',21) can never succeed
+spvcOrig.erl:1610: Guard test is_record(Row::[any(),...],'spvcFrPerm',6) can never succeed
+spvcOrig.erl:1613: The pattern {If_Value, _, _, _} can never match the type [any(),...]
+spvcOrig.erl:1615: The pattern {If_Value, _, _} can never match the type [any(),...]
+spvcOrig.erl:1617: The pattern {If_Value, _} can never match the type [any(),...]
+spvcOrig.erl:1621: The variable _ can never match since previous clauses completely covered the type [any(),...]
+spvcOrig.erl:1731: The pattern [_, _, _, _] can never match the type tuple()
+spvcOrig.erl:1733: The pattern [_, _, _] can never match the type tuple()
+spvcOrig.erl:1735: The pattern [_, _] can never match the type tuple()
+spvcOrig.erl:264: The pattern {If_Value, Vpi_Value} can never match the type {_,_,_}
+spvcOrig.erl:271: Guard test is_integer(Vci_Value::'no_vc') can never succeed
+spvcOrig.erl:275: The pattern {If_Value, Vpi_Value} can never match the type {_,_,'no_vc'}
+spvcOrig.erl:305: The pattern {'spvcVcc', 'targetAddress'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:307: The pattern {'spvcVcc', 'selectType'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:309: The pattern {'spvcVcc', 'targetVpi'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:311: The pattern {'spvcVcc', 'targetVci'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:313: The pattern {'spvcVcc', 'releaseCause'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:315: The pattern {'spvcVcc', 'releaseDiagnostic'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:317: The pattern {'spvcVcc', 'retryInterval'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:319: The pattern {'spvcVcc', 'retryTimer'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:321: The pattern {'spvcVcc', 'retryThreshold'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:323: The pattern {'spvcVcc', 'retryFailures'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:325: The pattern {'spvcVcc', 'retryLimit'} can never match the type {'spvcFr','rowStatus'} | {'spvcVcc','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:329: The pattern {'spvcVcc', 'restart'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:331: The pattern {'spvcVcc', 'targetSelectType_any'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:333: The pattern {'spvcVcc', 'targetSelectType_required'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:335: The pattern {'spvcVpc', 'targetAddress'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:337: The pattern {'spvcVpc', 'selectType'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:339: The pattern {'spvcVpc', 'targetVpi'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:341: The pattern {'spvcVpc', 'releaseCause'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:343: The pattern {'spvcVpc', 'releaseDiagnostic'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:345: The pattern {'spvcVpc', 'retryInterval'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:347: The pattern {'spvcVpc', 'retryTimer'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:349: The pattern {'spvcVpc', 'retryThreshold'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:351: The pattern {'spvcVpc', 'retryFailures'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:353: The pattern {'spvcVpc', 'retryLimit'} can never match the type {'spvcFr','rowStatus'} | {'spvcVpc','rowStatus'}
+spvcOrig.erl:357: The pattern {'spvcVpc', 'restart'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:359: The pattern {'spvcVpc', 'targetSelectType_any'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:361: The pattern {'spvcVpc', 'targetSelectType_required'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:363: The pattern {'spvcFr', 'targetAddress'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:365: The pattern {'spvcFr', 'selectType'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:367: The pattern {'spvcFr', 'identifier'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:369: The pattern {'spvcFr', 'targetVpi'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:371: The pattern {'spvcFr', 'targetVci'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:373: The pattern {'spvcFr', 'translation'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:375: The pattern {'spvcFr', 'releaseCause'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:377: The pattern {'spvcFr', 'releaseDiagnostic'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:379: The pattern {'spvcFr', 'operStatus'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:381: The pattern {'spvcFr', 'adminStatus'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:383: The pattern {'spvcFr', 'restart'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:385: The pattern {'spvcFr', 'retryInterval'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:387: The pattern {'spvcFr', 'retryTimer'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:389: The pattern {'spvcFr', 'retryThreshold'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:391: The pattern {'spvcFr', 'retryFailures'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:393: The pattern {'spvcFr', 'retryLimit'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:395: The pattern {'spvcFr', 'lastChange'} can never match the type {'spvcFr','rowStatus'}
+spvcOrig.erl:404: Guard test is_record(Row::[any(),...],'spvcObj',24) can never succeed
+spvcOrig.erl:411: Guard test is_record(Row::[any(),...],'spvcVcc',25) can never succeed
+spvcOrig.erl:414: Guard test is_record(Row::[any(),...],'spvcVpc',20) can never succeed
+spvcOrig.erl:417: Guard test is_record(Row::[any(),...],'spvcVpcPerm',12) can never succeed
+spvcOrig.erl:420: Guard test is_record(Row::[any(),...],'spvcVccPerm',17) can never succeed
+spvcOrig.erl:423: Guard test is_record(Row::[any(),...],'spvcTargetVc',6) can never succeed
+spvcOrig.erl:426: Guard test is_record(Row::[any(),...],'spvcTargetVp',6) can never succeed
+spvcOrig.erl:429: Guard test is_record(Row::[any(),...],'pchVc',32) can never succeed
+spvcOrig.erl:432: Guard test is_record(Row::[any(),...],'pchVp',33) can never succeed
+spvcOrig.erl:435: Guard test is_record(Row::[any(),...],'spvcFr',21) can never succeed
+spvcOrig.erl:438: Guard test is_record(Row::[any(),...],'spvcFrPerm',6) can never succeed
+spvcOrig.erl:441: The pattern {If_Value, _, _, _} can never match the type [any(),...]
+spvcOrig.erl:443: The pattern {If_Value, _, _} can never match the type [any(),...]
+spvcOrig.erl:445: The pattern {If_Value, _} can never match the type [any(),...]
+spvcOrig.erl:449: The variable _ can never match since previous clauses completely covered the type [any(),...]
+spvcOrig.erl:468: Guard test is_record(Row::[any(),...],'spvcObj',24) can never succeed
+spvcOrig.erl:475: Guard test is_record(Row::[any(),...],'spvcVcc',25) can never succeed
+spvcOrig.erl:478: Guard test is_record(Row::[any(),...],'spvcVpc',20) can never succeed
+spvcOrig.erl:481: Guard test is_record(Row::[any(),...],'spvcVpcPerm',12) can never succeed
+spvcOrig.erl:484: Guard test is_record(Row::[any(),...],'spvcVccPerm',17) can never succeed
+spvcOrig.erl:487: Guard test is_record(Row::[any(),...],'spvcTargetVc',6) can never succeed
+spvcOrig.erl:490: Guard test is_record(Row::[any(),...],'spvcTargetVp',6) can never succeed
+spvcOrig.erl:493: Guard test is_record(Row::[any(),...],'pchVc',32) can never succeed
+spvcOrig.erl:496: Guard test is_record(Row::[any(),...],'pchVp',33) can never succeed
+spvcOrig.erl:499: Guard test is_record(Row::[any(),...],'spvcFr',21) can never succeed
+spvcOrig.erl:502: Guard test is_record(Row::[any(),...],'spvcFrPerm',6) can never succeed
+spvcOrig.erl:505: The pattern {If_Value, _, _, _} can never match the type [any(),...]
+spvcOrig.erl:507: The pattern {If_Value, _, _} can never match the type [any(),...]
+spvcOrig.erl:509: The pattern {If_Value, _} can never match the type [any(),...]
+spvcOrig.erl:513: The variable _ can never match since previous clauses completely covered the type [any(),...]
+spvcOrig.erl:546: The pattern {_, _, _, _} can never match the type [any(),...]
+spvcOrig.erl:548: The pattern {_, _, _} can never match the type [any(),...]
+spvcOrig.erl:550: The pattern {_, _} can never match the type [any(),...]
+spvcOrig.erl:559: The pattern {'spvcVcc', 'targetAddress'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:561: The pattern {'spvcVcc', 'selectType'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:563: The pattern {'spvcVcc', 'targetVpi'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:565: The pattern {'spvcVcc', 'targetVci'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:567: The pattern {'spvcVcc', 'releaseCause'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:569: The pattern {'spvcVcc', 'releaseDiagnostic'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:571: The pattern {'spvcVcc', 'retryInterval'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:573: The pattern {'spvcVcc', 'retryTimer'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:575: The pattern {'spvcVcc', 'retryThreshold'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:577: The pattern {'spvcVcc', 'retryFailures'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:579: The pattern {'spvcVcc', 'retryLimit'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:581: The pattern {'spvcVcc', 'rowStatus'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:585: The pattern {'spvcVcc', 'targetSelectType_any'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:587: The pattern {'spvcVcc', 'targetSelectType_required'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:589: The pattern {'spvcVpc', 'targetAddress'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:591: The pattern {'spvcVpc', 'selectType'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:593: The pattern {'spvcVpc', 'targetVpi'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:595: The pattern {'spvcVpc', 'releaseCause'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:597: The pattern {'spvcVpc', 'releaseDiagnostic'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:599: The pattern {'spvcVpc', 'retryInterval'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:601: The pattern {'spvcVpc', 'retryTimer'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:603: The pattern {'spvcVpc', 'retryThreshold'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:605: The pattern {'spvcVpc', 'retryFailures'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:607: The pattern {'spvcVpc', 'retryLimit'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:609: The pattern {'spvcVpc', 'rowStatus'} can never match the type {'spvcFr','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:613: The pattern {'spvcVpc', 'targetSelectType_any'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:615: The pattern {'spvcVpc', 'targetSelectType_required'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:617: The pattern {'spvcFr', 'targetAddress'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:619: The pattern {'spvcFr', 'selectType'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:621: The pattern {'spvcFr', 'identifier'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:623: The pattern {'spvcFr', 'targetVpi'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:625: The pattern {'spvcFr', 'targetVci'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:627: The pattern {'spvcFr', 'translation'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:629: The pattern {'spvcFr', 'releaseCause'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:631: The pattern {'spvcFr', 'releaseDiagnostic'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:633: The pattern {'spvcFr', 'operStatus'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:635: The pattern {'spvcFr', 'adminStatus'} can never match the type {'spvcFr','restart'}
+spvcOrig.erl:639: The pattern {'spvcFr', 'retryInterval'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:641: The pattern {'spvcFr', 'retryTimer'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:643: The pattern {'spvcFr', 'retryThreshold'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:645: The pattern {'spvcFr', 'retryFailures'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:647: The pattern {'spvcFr', 'retryLimit'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:649: The pattern {'spvcFr', 'lastChange'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:651: The pattern {'spvcFr', 'rowStatus'} can never match the type {'spvcFr','restart'} | {'spvcVcc','restart'} | {'spvcVpc','restart'}
+spvcOrig.erl:730: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVcc',25) can never succeed
+spvcOrig.erl:733: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVpc',20) can never succeed
+spvcOrig.erl:736: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVpcPerm',12) can never succeed
+spvcOrig.erl:739: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVccPerm',17) can never succeed
+spvcOrig.erl:742: Guard test is_record(Spvc::[] | #spvcObj{},'spvcTargetVc',6) can never succeed
+spvcOrig.erl:745: Guard test is_record(Spvc::[] | #spvcObj{},'spvcTargetVp',6) can never succeed
+spvcOrig.erl:748: Guard test is_record(Spvc::[] | #spvcObj{},'pchVc',32) can never succeed
+spvcOrig.erl:751: Guard test is_record(Spvc::[] | #spvcObj{},'pchVp',33) can never succeed
+spvcOrig.erl:754: Guard test is_record(Spvc::[] | #spvcObj{},'spvcFr',21) can never succeed
+spvcOrig.erl:757: Guard test is_record(Spvc::[] | #spvcObj{},'spvcFrPerm',6) can never succeed
+spvcOrig.erl:760: The pattern {If_Value, _, _, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:762: The pattern {If_Value, _, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:764: The pattern {If_Value, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:766: The pattern [If_Value | _] can never match the type [] | #spvcObj{}
+spvcOrig.erl:802: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVcc',25) can never succeed
+spvcOrig.erl:805: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVpc',20) can never succeed
+spvcOrig.erl:808: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVpcPerm',12) can never succeed
+spvcOrig.erl:811: Guard test is_record(Spvc::[] | #spvcObj{},'spvcVccPerm',17) can never succeed
+spvcOrig.erl:814: Guard test is_record(Spvc::[] | #spvcObj{},'spvcTargetVc',6) can never succeed
+spvcOrig.erl:817: Guard test is_record(Spvc::[] | #spvcObj{},'spvcTargetVp',6) can never succeed
+spvcOrig.erl:820: Guard test is_record(Spvc::[] | #spvcObj{},'pchVc',32) can never succeed
+spvcOrig.erl:823: Guard test is_record(Spvc::[] | #spvcObj{},'pchVp',33) can never succeed
+spvcOrig.erl:826: Guard test is_record(Spvc::[] | #spvcObj{},'spvcFr',21) can never succeed
+spvcOrig.erl:829: Guard test is_record(Spvc::[] | #spvcObj{},'spvcFrPerm',6) can never succeed
+spvcOrig.erl:832: The pattern {If_Value, _, _, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:834: The pattern {If_Value, _, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:836: The pattern {If_Value, _} can never match the type [] | #spvcObj{}
+spvcOrig.erl:838: The pattern [If_Value | _] can never match the type [] | #spvcObj{}
+spvcOrig.erl:951: The pattern [IfIndex_Value, Vpi_Value, Vci_Value, _] can never match the type tuple()
+spvcOrig.erl:953: The pattern [IfIndex_Value, Vpi_Value, _] can never match the type tuple()
+spvcOrig.erl:974: The pattern [IfIndex_Value, Vpi_Value, Vci_Value, _] can never match the type tuple()
+spvcOrig.erl:976: The pattern [IfIndex_Value, Vpi_Value, _] can never match the type tuple()
+spvcOrig.erl:996: The pattern [IfIndex_Value, Vpi_Value, Vci_Value, _] can never match the type tuple()
+spvcOrig.erl:998: The pattern [IfIndex_Value, Vpi_Value, _] can never match the type tuple()
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/results/wsp_pdu b/lib/dialyzer/test/user_tests_SUITE_data/results/wsp_pdu
new file mode 100644
index 0000000000..a47b1f1f2c
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/results/wsp_pdu
@@ -0,0 +1,25 @@
+
+wsp_pdu.erl:1063: The pattern [H | Hs] can never match the type []
+wsp_pdu.erl:1162: The call wsp_pdu:parse_push_flag(Value::[any()]) will never return since it differs in the 1st argument from the success typing arguments: (integer())
+wsp_pdu.erl:2400: Function decode_retry_after/2 has no local return
+wsp_pdu.erl:2403: The call wsp_pdu:d_date(Data1::binary()) will never return since it differs in the 1st argument from the success typing arguments: (integer() | {'short',binary()})
+wsp_pdu.erl:2406: Guard test is_integer(Sec::{[byte()] | byte() | {'long',binary()} | {'short',binary()},binary()}) can never succeed
+wsp_pdu.erl:2408: The pattern {'short', Data2} can never match the type {[byte()] | byte() | {'long',binary()} | {'short',binary()},binary()}
+wsp_pdu.erl:2755: Function parse_push_flag/1 has no local return
+wsp_pdu.erl:2756: The call erlang:integer_to_list(Value::[any()]) will never return since it differs in the 1st argument from the success typing arguments: (integer())
+wsp_pdu.erl:2875: The call wsp_pdu:d_text_string(Data::byte()) will never return since it differs in the 1st argument from the success typing arguments: (binary())
+wsp_pdu.erl:2976: The call wsp_pdu:d_q_value(QData::byte()) will never return since it differs in the 1st argument from the success typing arguments: (<<_:8,_:_*8>>)
+wsp_pdu.erl:3336: The call wsp_pdu:encode_typed_field(Ver::any(),'Q-value',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
+wsp_pdu.erl:3342: The call wsp_pdu:encode_typed_field(Ver::any(),'Ver-value',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
+wsp_pdu.erl:3349: The call wsp_pdu:encode_typed_field(Ver::any(),'Integer-value',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
+wsp_pdu.erl:3367: The call wsp_pdu:encode_typed_field(Ver::any(),'Field-name',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
+wsp_pdu.erl:3405: The call wsp_pdu:encode_typed_field(Ver::any(),'Delta-seconds-value',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
+wsp_pdu.erl:3437: The call wsp_pdu:encode_typed_field(Ver::any(),'Integer-value',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
+wsp_pdu.erl:3455: The call wsp_pdu:decode_typed_field('Version-value',Data::binary(),Version::any()) will never return since it differs in the 1st argument from the success typing arguments: ('Constrained-encoding' | 'Date-value' | 'Delta-seconds-value' | 'Field-name' | 'No-value' | 'Q-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',binary(),any())
+wsp_pdu.erl:3459: The call wsp_pdu:decode_typed_field('Integer-value',Data::binary(),Version::any()) will never return since it differs in the 1st argument from the success typing arguments: ('Constrained-encoding' | 'Date-value' | 'Delta-seconds-value' | 'Field-name' | 'No-value' | 'Q-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',binary(),any())
+wsp_pdu.erl:3531: The call wsp_pdu:decode_typed_field('Integer-value',Data::binary(),Version::any()) will never return since it differs in the 1st argument from the success typing arguments: ('Constrained-encoding' | 'Date-value' | 'Delta-seconds-value' | 'Field-name' | 'No-value' | 'Q-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',binary(),any())
+wsp_pdu.erl:3593: The pattern 'Delta-Seconds-value' can never match the type 'Delta-seconds-value' | 'Field-name' | 'Integer-value' | 'No-value' | 'Q-value' | 'Ver-value'
+wsp_pdu.erl:4844: The call wsp_pdu:d_long('data') will never return since it differs in the 1st argument from the success typing arguments: (binary())
+wsp_pdu.erl:510: The variable _ can never match since previous clauses completely covered the type 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9
+wsp_pdu.erl:512: The variable _ can never match since previous clauses completely covered the type 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9
+wsp_pdu.erl:5265: Call to missing or unexported function inet:ip_to_bytes/1
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/broken_dialyzer.erl b/lib/dialyzer/test/user_tests_SUITE_data/src/broken_dialyzer.erl
new file mode 100644
index 0000000000..fd9a6ada1a
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/broken_dialyzer.erl
@@ -0,0 +1,130 @@
+-module(broken_dialyzer).
+
+-export([do_move_next/1]).
+
+-define(ap_indices, 512).
+-define(dp_indices, 504).
+
+
+-record(apR,{a,c=[],n=[],nc=0,nn=0,nl=[]}).
+-define(apL(L), [#apR{a=A} || A <- L]).
+
+-define(gr, get(my_return_value)).
+-define(pr(PR), put(my_return_value, PR)).
+-record(bit,{i,c,n,s}). % index, current, next, state
+
+
+do_move_next({BL,AL}) ->
+ Max = max(length(BL), length(AL)),
+ Max2 = max(length(BL)*2, length(AL)),
+ MoveTo = [A || A <- AL, A#apR.nn < Max, A#apR.nn+A#apR.nc < Max2],
+ MoveFrom = [A || A <- AL,
+ (A#apR.nn > Max) orelse (A#apR.nn+A#apR.nc > Max2)],
+ Unchanged = (AL--MoveTo)--MoveFrom,
+ {BL1,{AL1,{AL2,AL3}}} =
+ lists:mapfoldl(
+ fun(B=#bit{i=I,c=C,s=S,n=Next}, {From,{To,FilledUp}})
+ when S==ok;S==lost_replica;S==moved_replica ->
+ case lists:keysearch(Next,#apR.a,From) of
+ {value, F=#apR{n=N1,nn=NN1,nc=NC1}}
+ when (NN1>Max) or (NN1+NC1>Max2) ->
+ case C of
+ [] ->
+ {B, {From,{To,FilledUp}}};
+ ShortList ->
+ T=#apR{a=NewNext,n=N2,nn=NN2} =
+ find_next(Next,ShortList),
+ {value, {C,NL_from}} =
+ lists:keysearch(C,1,F#apR.nl),
+ {value, {C,NL_to}} =
+ lists:keysearch(C,1,T#apR.nl),
+ NewNL_from = lists:keyreplace(
+ C,1,F#apR.nl,{C,NL_from--[I]}),
+ NewNL_to = lists:keyreplace(
+ C,1,T#apR.nl,{C,[I|NL_to]}),
+
+ NewT = T#apR{n=[I|N2],nn=NN2+1,
+ nl=NewNL_to},
+
+ {B#bit{n=NewNext,
+ s = if
+ S == lost_replica ->
+ lost_replica;
+ true ->
+ moved_replica
+ end},
+ {lists:keyreplace(
+ Next,#apR.a,From,
+ F#apR{n=N1--[I],nn=NN1-1,nl=NewNL_from}),
+ if
+ (NewT#apR.nn+NewT#apR.nc >= Max2)
+ or (NewT#apR.nn >= Max) ->
+ {lists:keydelete(NewNext,#apR.a,To),
+ [NewT|FilledUp]};
+ true ->
+ {lists:keyreplace(
+ NewNext,#apR.a,To,NewT),
+ FilledUp}
+ end}}
+ end;
+ _ ->
+ {B, {From,{To,FilledUp}}}
+ end;
+ (B, A) ->
+ {B, A}
+ end, {MoveFrom,{MoveTo,[]}},BL),
+ {BL1,Unchanged++AL1++AL2++AL3}.
+
+%%% -----------------------------------------------------------------
+%%% find_next/2
+%%%
+%%% ------------------------------------------------------------------
+
+find_next(Ap,L) ->
+ hd(catch
+ lists:foreach(
+ fun(SelVal) ->
+ case [ApR ||
+ ApR <- L,
+ begin
+ {value,{Ap,NL}} =
+ lists:keysearch(Ap,1,ApR#apR.nl),
+ length(NL) =< SelVal
+ end] of
+ [] ->
+ ok;
+ ShortList ->
+ throw(ShortList)
+ end
+ end,
+ lists:seq(0,?ap_indices))).
+
+%%% -----------------------------------------------------------------
+%%% max/2
+%%%
+%%% Calculates max number of indices per AP, given number of indices
+%%% and number of APs.
+%%% -----------------------------------------------------------------
+max(F,S) ->
+ (F div S) + if
+ (F rem S) == 0 ->
+ 0;
+ true ->
+ 1
+ end.
+
+%%% ==============================================================
+%%% ADMINISTRATIVE INFORMATION
+%%% ==============================================================
+%%% #Copyright (C) 2005
+%%% by ERICSSON TELECOM AB
+%%% S - 125 26 STOCKHOLM
+%%% SWEDEN, tel int + 46 8 719 0000
+%%%
+%%% The program may be used and/or copied only with the written
+%%% permission from ERICSSON TELECOM AB, or in accordance with
+%%% the terms and conditions stipulated in the agreement/contract
+%%% under which the program has been supplied.
+%%%
+%%% All rights reserved
+%%%
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/gcp.hrl b/lib/dialyzer/test/user_tests_SUITE_data/src/gcp.hrl
new file mode 100644
index 0000000000..aac87d8b6b
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/gcp.hrl
@@ -0,0 +1,166 @@
+%%% #0. BASIC INFORMATION
+%%% ----------------------------------------------------------
+%%% %CCaseFile: gcp.hrl %
+%%% Author: EAB/UPD/AV
+%%% Description: Internal include file.
+%%% ----------------------------------------------------------
+-hrl_id('9/190 55-CNA 113 033 Ux').
+-hrl_vsn('/main/R1A/21').
+-hrl_date('2005-05-31').
+-hrl_author('uabasve').
+%%% %CCaseTemplateFile: module.hrl %
+%%% %CCaseTemplateId: 17/002 01-FEA 202 714 Ux, Rev: /main/4 %
+%%%
+%%% Copyright (C) 2000-2005 by Ericsson Telecom AB
+%%% SE-126 25 STOCKHOLM
+%%% SWEDEN, tel int + 46 8 719 0000
+%%%
+%%% The program may be used and/or copied only with the written
+%%% permission from Ericsson Telecom AB, or in accordance with
+%%% the terms and conditions stipulated in the agreement/contract
+%%% under which the program has been supplied.
+%%%
+%%% All rights reserved
+%%%
+%%% ----------------------------------------------------------
+%%% #1. REVISION LOG
+%%% ----------------------------------------------------------
+%%% Rev Date Name What
+%%% ----- ------- -------- ------------------------
+%%% R1A/1 05-02-07 uabasve Copied from EAS R7A/9
+%%% R1A/2 05-02-08 ejojmjn Removed SAAL
+%%% R1A/3- 05-03-18 uabasve Clean.
+%%% ----------------------------------------------------------
+%%%
+%%% #2. CODE
+%%% #---------------------------------------------------------
+%%% #2.1 DEFINITION OF CONSTANTS
+%%% #---------------------------------------------------------
+
+%% Keys into gcpVariables for various options/values.
+-define(TRAFFIC_DESCRIPTOR_KEY, traffic_descriptor).
+
+%% H.248 version at link creation.
+-define(INITIAL_H248_VERSION, 1).
+
+%% Exceptions for use within a module. ?MODULE is just extra protection
+%% against catching something unexpected.
+-define(THROW(Reason), throw({error, ?MODULE, ?LINE, Reason})).
+-define(CATCH(Expr), try Expr
+ catch throw: ?FAILURE(Reason) -> {error, Reason}
+ end).
+-define(FAILURE(T), {error, ?MODULE, _, T}).
+
+%% The SendHandle used by a GCP transport process must be a tuple
+%% of length >= 2 whose first two elements are the pid of the
+%% transport process and index (aka #gcpLinkTable.key) of the link
+%% upon which incoming data has arrived.
+-define(SH_PID(SendHandle), element(1, SendHandle)).
+-define(SH_LINK(SendHandle), element(2, SendHandle)).
+-define(SH_SET_PID(SendHandle, Pid), setelement(1, SendHandle, Pid)).
+
+%% Megaco process that CH and OM servers monitor. This needs to be
+%% replaced by a documented method.
+-define(MEGACO_APP, megaco_config).
+
+%% The message that gcpI:send_reply sends to the process that's waiting
+%% for an action reply.
+-define(ACTION_REPLY_MESSAGE(ActionReplies, Result),
+ {reply, ActionReplies, Result}).
+
+%%% #---------------------------------------------------------
+%%% #2.2 DEFINITION OF RECORDS
+%%% #---------------------------------------------------------
+
+-record(mg, {pref}).
+-record(mgc, {mgid}).
+
+%% User configuration that gets mapped into megaco user info by
+%% gcpLib:make_user_info/1. GCP exposes only a subset of what's
+%% possible to set in megaco.
+-record(user_config,
+ {reply_timer = 30000, %% ms to wait for reply ack
+ %% Incoming transactions:
+ pending_timer = 10000, %% ms until outgoing transaction pending
+ sent_pending_limit = 5, %% nr of outgoing pendings before 506
+ %% Outgoing transactions:
+ recv_pending_limit = infinity,%% nr of incoming pendings before fail
+ request_timer = 3000, %% ms to wait for response before resend
+ request_retries = 5, %% nr unanswered sends before fail
+ long_request_timer = 15000, %% ms to wait for reply after pending
+ long_request_retries = 5}). %% nr of pendings/timeouts before fail
+
+%% Record passed into transport implementations at transport start.
+%% Expected to be passed back to gcpTransportI.
+-record(receive_handle,
+ {megaco_receive_handle, %% passed to megaco:receive_message
+ receive_message}). %% gcpLinkTable.receive_message
+
+%%% ---------------------------------------------------------------------------
+%%% # gcpRegistrationTable
+%%%
+%%% Record containing defined MGC's/MG's (aka megaco users).
+%%% ---------------------------------------------------------------------------
+
+-record(gcpRegistrationTable,
+ {key, %% user reference (aka MG/MGC id)
+ role, %% mg | mgc
+ mid, %% H.248 mid of the MGC/MG
+ version, %% of H.248
+ callback, %% {Module, ExtraArgs}
+ config = #user_config{}}).
+
+%%% ----------------------------------------------------------
+%%% # gcpLinkTable
+%%% ----------------------------------------------------------
+
+-record(gcpLinkTable,
+ {key, %% link reference
+ endpoint, %% #mgc{} | #mg{}
+ user, %% registration table key
+ chid, %% call handler of transport
+ admin_state, %% up | down
+ op_state, %% up | down | pending | disabled
+ restart = auto, %% auto | user
+ encoding_mod, %% module implementing megaco_encoder
+ encoding_config, %% as passed to encoding_mod
+ transport_start, %% {M,F,ExtraArgs} for transport start
+ transport_data, %% arbitrary, passed to transport_mod
+ send_message, %% {default|sysrpc|transport|module, Module}
+ receive_message, %% local | {M,F,ExtraArgs} for decode node
+ tried = false, %% Only for links owned by a MG.
+ %% Used to indicate that a setup attempt
+ %% has been performed on this link.
+ t95_period = 350000}).
+
+%%% ----------------------------------------------------------
+%%% # gcpActiveLinkTable
+%%% ----------------------------------------------------------
+
+-record(gcpActiveLinkTable,
+ {key, %% {mg|mgc, MgId}
+ link, %% link reference
+ chid, %% CH the link is tied to
+ node, %% node the link is on
+ conn_handle, %% record megaco_conn_handle
+ send_handle, %% {TransportPid, LinkIdx, ...}
+ version = ?INITIAL_H248_VERSION}).
+
+%%% ----------------------------------------------------------
+%%% # gcpVariables
+%%% ----------------------------------------------------------
+
+-record(gcpVariables,
+ {key,
+ value}).
+
+%%% ----------------------------------------------------------
+%%% # gcpReplyData
+%%% ----------------------------------------------------------
+
+-record(gcpReplyData,
+ {callback, %% {Module, Args}
+ mgid,
+ user_data, %% As passed by the user on send
+ prio,
+ timestamp}).
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/gcpFlowControl.erl b/lib/dialyzer/test/user_tests_SUITE_data/src/gcpFlowControl.erl
new file mode 100644
index 0000000000..1653220352
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/gcpFlowControl.erl
@@ -0,0 +1,397 @@
+%%%-------------------------------------------------------------------
+%%% File : gcpFlowControl.erl
+%%% Author : EAB/UPD/AV
+%%% Description : Implements overload protection.
+%%%-------------------------------------------------------------------
+-module(gcpFlowControl).
+-id('24/190 55-CNA 113 033 Ux').
+-vsn('/main/R1A/14').
+-date('2005-05-04').
+-author('uabasve').
+%%% ----------------------------------------------------------
+%%% %CCaseTemplateFile: module.erl %
+%%% %CCaseTemplateId: 16/002 01-FEA 202 714 Ux, Rev: /main/4 %
+%%%
+%%% Copyright (C) 2001-2005 by Ericsson Telecom AB
+%%% SE-126 25 STOCKHOLM
+%%% SWEDEN, tel int + 46 8 719 0000
+%%%
+%%% The program may be used and/or copied only with the written
+%%% permission from Ericsson Telecom AB, or in accordance with
+%%% the terms and conditions stipulated in the agreement/contract
+%%% under which the program has been supplied.
+%%%
+%%% All rights reserved
+%%%
+%%%
+%%% ----------------------------------------------------------
+%%% #1. REVISION LOG
+%%% ----------------------------------------------------------
+%%% Rev Date Name What
+%%% -------- -------- -------- ------------------------
+%%% R1A/1-2 05-02-07 ejojmjn Copied from EAS R7A/11.
+%%% R1A/3-14 05-03-14 uabasve Clean.
+%%%--------------------------------------------------------------------
+
+-include_lib("megaco/include/megaco.hrl").
+-include_lib("megaco/include/megaco_message_v1.hrl").
+-include("gcp.hrl").
+
+-export([send_request/4, %% user send from gcpInterface
+ receive_reply/2, %% from callback in gcpTransaction
+ init_ets_tables/1,
+ init_data/2]).
+
+-define(PRIO_INFINITY, 16).
+-define(MIN_WINDOW, 10).
+-define(MAX_WINDOW, 100).
+
+-define(BUCKET_MAX, 100).
+-define(BUCKET_THRESH_HIGH, 80).
+-define(BUCKET_THRESH_LOW, 20).
+
+-define(ALLOW_TIMEOUT, 1000).
+
+%% Holds counters for flow control in GCP
+-record(gcpFlowControlTable,
+ {key,
+ window = 50,
+ available = 50,
+ bucket = 0,
+ q = 0,
+ sent = 0, %% Counts all attempts
+ rejectable = 0, %% Counts rejectable attempts
+ t95,
+ errors = 0,
+ rejects = 0,
+ replies = 0}).
+
+-record(gcpFlowControlBitmap,
+ {key,
+ count = 0}).
+
+%%====================================================================
+%% External functions
+%%====================================================================
+
+%%--------------------------------------------------------------------
+%% Function: send_request/4
+%%
+%% Output: ok | {error, Reason}
+%%--------------------------------------------------------------------
+
+send_request(ActiveLink, TimerOptions, ActionRequests, UserData) ->
+ #gcpActiveLinkTable{key = Key,
+ conn_handle = ConnHandle}
+ = ActiveLink,
+ Prio = prio(ActionRequests),
+ incr(Key, sent),
+ case allow(Key, Prio) of
+ {true, Timestamp} ->
+ grant_request(user_data(ConnHandle),
+ Key,
+ Prio,
+ Timestamp,
+ ConnHandle,
+ TimerOptions,
+ ActionRequests,
+ UserData);
+ false ->
+ {error, rejected}
+ end.
+
+%%--------------------------------------------------------------------
+%% Function: receive_reply/2
+%% Description:
+%%--------------------------------------------------------------------
+
+receive_reply(Key, Timestamp) ->
+ incr(Key, available),
+ incr(Key, replies),
+ release(Key),
+ report_time(Key, Timestamp).
+
+%%--------------------------------------------------------------------
+%% Func: init_ets_tables/1
+%%
+%% Returns: ok
+%%--------------------------------------------------------------------
+
+init_ets_tables(Role) ->
+ create_ets(Role, gcpFlowControlTable, #gcpFlowControlTable.key),
+ create_ets(Role, gcpFlowControlBitmap, #gcpFlowControlBitmap.key),
+ ok.
+
+create_ets(Role, Table, Pos) when integer(Pos) ->
+ create_ets(Role,
+ Table,
+ [named_table, ordered_set, public, {keypos, Pos}]);
+
+create_ets(test, Table, ArgList) ->
+ ets:new(Table, ArgList);
+create_ets(Role, Table, ArgList) ->
+ case ets:info(Table) of
+ undefined ->
+ sysCmd:ets_new(Table, ArgList);
+ _ when Role == ch ->
+ sysCmd:inherit_tables([Table]);
+ _ when Role == om ->
+ ok
+ end.
+
+%%--------------------------------------------------------------------
+%% Func: init_data/2
+%%--------------------------------------------------------------------
+
+init_data(Key, T95) ->
+ ets:insert(gcpFlowControlTable, #gcpFlowControlTable{key = Key,
+ t95 = T95}).
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+%%% ----------------------------------------------------------
+%%% incr
+%%% ----------------------------------------------------------
+
+cntr(Key, Field) ->
+ incr(Key, Field, 0).
+
+incr(Key, Field) ->
+ incr(Key, Field, 1).
+
+-define(INCR(Field),
+ incr(Key, Field, X) -> upd_c(Key, {#gcpFlowControlTable.Field, X})).
+
+?INCR(sent);
+?INCR(replies);
+?INCR(q);
+?INCR(t95);
+?INCR(errors);
+?INCR(rejects);
+?INCR(rejectable);
+?INCR(window);
+?INCR(available);
+
+incr(Key, bucket, X)->
+ upd_c(Key, {#gcpFlowControlTable.bucket, X, ?BUCKET_MAX, ?BUCKET_MAX}).
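+
+%% Each ?INCR(Field) above expands to one clause of incr/3, e.g. ?INCR(sent)
+%% becomes roughly
+%%
+%%   incr(Key, sent, X) -> upd_c(Key, {#gcpFlowControlTable.sent, X});
+%%
+%% with the bucket clause closing the function. The 4-tuple in the bucket
+%% clause uses ets:update_counter's {Pos, Incr, Threshold, SetValue} form
+%% to cap the counter at ?BUCKET_MAX.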
+
+upd_c(Key, N) ->
+ ets:update_counter(gcpFlowControlTable, Key, N).
+
+%%% ----------------------------------------------------------
+%%% decr
+%%%
+%%% Beware that decr is just incr with a negative amount; care has to
+%%% be taken not to bungle things when max/min values are used.
+%%% ----------------------------------------------------------
+
+decr(Key, available, X) ->
+ upd_c(Key, {#gcpFlowControlTable.available, -X});
+decr(Key, window, X) ->
+ upd_c(Key, {#gcpFlowControlTable.window, -X});
+decr(Key, bucket, X) ->
+ upd_c(Key, {#gcpFlowControlTable.bucket, -X, 0, 0}).
+
+decr(Key, Field) ->
+ decr(Key, Field, 1).
+
+%%% ----------------------------------------------------------
+%%% allow
+%%% ----------------------------------------------------------
+
+allow(Key, ?PRIO_INFINITY) ->
+ decr(Key, available),
+ {true, now()};
+
+allow(Key, Prio) ->
+ incr(Key, rejectable),
+ case decr(Key, available) of
+ N when N > 0 ->
+ {true, no_stamp};
+ _ ->
+ %% We did not send it, therefore incr available again
+ incr(Key, available),
+ queue(Key, Prio)
+ end.
+
+%%% ----------------------------------------------------------
+%%% queue
+%%% ----------------------------------------------------------
+
+queue(Key, Prio) ->
+ incr(Key, q),
+ T = {Key, Prio, now(), self()},
+ ets:insert(gcpFlowControlBitmap, #gcpFlowControlBitmap{key = T}),
+ wait(T).
+
+%%% ----------------------------------------------------------
+%%% wait
+%%% ----------------------------------------------------------
+
+wait({Key, _Prio, _When, _Self} = T) ->
+ receive
+ allow ->
+ ets:delete(gcpFlowControlBitmap, T),
+ decr(Key, available),
+ {true, no_stamp}
+ after ?ALLOW_TIMEOUT ->
+ timeout(T),
+ adjust_window(Key),
+ incr(Key, rejects),
+ false
+ end.
+
+timeout(T) ->
+ case ets:update_counter(gcpFlowControlBitmap, T, 1) of
+ 1 ->
+ %% Got the lock: no one has released Key and sent 'allow'.
+ ets:delete(gcpFlowControlBitmap, T),
+ ok;
+ _ ->
+ %% A releasing process got the lock: 'allow' has been
+ %% sent. Try to remove the message before proceeding.
+ %% (This is to keep mdisp from complaining apparently.)
+ ets:delete(gcpFlowControlBitmap, T),
+ receive
+ allow ->
+ ok
+ after ?ALLOW_TIMEOUT ->
+ io:format("~p: errant allow: ~p~n", [?MODULE, T])
+ end
+ end.
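+
+%% The gcpFlowControlBitmap entry thus doubles as a one-shot lock: both the
+%% timing-out waiter (above) and a releasing process (release/3 below) bump
+%% the same counter, and whoever sees the result 1 owns the entry. The
+%% releaser then sends 'allow'; a waiter that sees a larger value knows an
+%% 'allow' is on its way and tries to drain it before the request is
+%% finally rejected.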
+
+%% Now, if we reject and our general response time is low
+%% (i.e. low bucket) then we increase the window size.
+adjust_window(Key) ->
+ adjust_window(Key,
+ cntr(Key, bucket) < ?BUCKET_THRESH_LOW
+ andalso cntr(Key, window) < ?MAX_WINDOW).
+
+adjust_window(Key, true) ->
+ incr(Key, window),
+ incr(Key, available),
+ incr(Key, bucket, 20);
+adjust_window(_, false) ->
+ ok.
+
+%%--------------------------------------------------------------------
+%% Func: report_time/2
+%%--------------------------------------------------------------------
+
+report_time(_, no_stamp) ->
+ ok;
+report_time(Key, {MS, S, Ms})->
+ {MegaSecs, Secs, MicroSecs} = now(),
+ p(Key,
+ MicroSecs - Ms + 1000000*(Secs - S + 1000000*(MegaSecs - MS)),
+ cntr(Key, t95)).
+
+%%% ----------------------------------------------------------
+%%% p
+%%% ----------------------------------------------------------
+
+p(Key, Time, T95) when Time =< T95 ->
+ decr(Key, bucket);
+p(Key, _Time, _T95) ->
+ %% If we have a long response time, then increase the leaky
+ %% bucket. If the bucket is over the high watermark and the window
+ %% is not already at its minimum size, then decrease the window
+ %% and available.
+ case {cntr(Key, window), incr(Key, bucket, 20)} of
+ {Window, Bucket} when Window > ?MIN_WINDOW,
+ Bucket > ?BUCKET_THRESH_HIGH ->
+ decr(Key, window),
+ decr(Key, available);
+ _ ->
+ ok
+ end.
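+
+%% With the constants above this behaves roughly as follows: a reply within
+%% T95 drains the bucket by 1, a slower reply adds 20 to it (capped at
+%% ?BUCKET_MAX = 100). Once the bucket exceeds ?BUCKET_THRESH_HIGH (80) and
+%% the window is above ?MIN_WINDOW (10), the window shrinks; conversely,
+%% rejects while the bucket is below ?BUCKET_THRESH_LOW (20) grow the window
+%% towards ?MAX_WINDOW (100) in adjust_window/2 above.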
+
+%%% ----------------------------------------------------------
+%%% release
+%%% ----------------------------------------------------------
+
+release(Key) ->
+ %% The choice of the key below will cause ets:prev/2 to return
+ %% the key with the highest priority which was queued most
+ %% recently. This relies on the fact that integers sort before
+ %% atoms, the atom 'prio' in this case. The atoms 'queued' and
+ %% 'pid' are of no significance.
+ release(Key, {Key, prio, queued, pid}).
+
+%% This isn't a (FIFO) queue within each priority, but a (LIFO) stack.
+
+release(Key, T) ->
+ release(Key, cntr(Key, available), ets:prev(gcpFlowControlBitmap, T)).
+
+%% Note that only entries queued under the same Key are matched.
+release(Key, N, {Key, _Prio, _When, Pid} = T) when N > 0 ->
+ case catch ets:update_counter(gcpFlowControlBitmap, T, 1) of
+ 1 ->
+ Pid ! allow;
+ _ ->
+ %% Another process has released this key.
+ release(Key, T)
+ end;
+
+release(_, _, _)->
+ ok.
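+
+%% [Editorial sketch, not part of the original test data.] The probe
+%% key built by release/1 works because Erlang's term order sorts
+%% integers before atoms, so {Key, prio, queued, pid} compares greater
+%% than every queued {Key, Prio, When, Pid} with an integer Prio, and
+%% ets:prev/2 on an ordered_set table therefore lands on the
+%% highest-priority, most recently queued entry. A standalone
+%% illustration (made-up table contents, kept as a comment):
+%%
+%%   prev_demo() ->
+%%       T = ets:new(prev_demo, [ordered_set, public]),
+%%       true = ets:insert(T, [{{k, 1, 100, a}},
+%%                             {{k, 2, 200, b}},
+%%                             {{k, 2, 300, c}}]),
+%%       {k, 2, 300, c} = ets:prev(T, {k, prio, queued, pid}),
+%%       ets:delete(T).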
+
+%%% ----------------------------------------------------------
+%%% user_data
+%%% ----------------------------------------------------------
+
+user_data(ConnHandle) ->
+ case catch megaco:conn_info(ConnHandle, reply_data) of
+ {'EXIT', _Reason} ->
+ false;
+ Rec ->
+ {value, Rec}
+ end.
+
+%%% ----------------------------------------------------------
+%%% grant_request
+%%% ----------------------------------------------------------
+
+grant_request({value, Rec},
+ Key, Prio, Time,
+ ConnHandle, Options, ActionRequests, UserData) ->
+ ReplyData = Rec#gcpReplyData{user_data = UserData,
+ prio = Prio,
+ timestamp = Time},
+ cast_rc(megaco:cast(ConnHandle,
+ ActionRequests,
+ [{reply_data, ReplyData} | Options]),
+ Key,
+ ActionRequests);
+
+grant_request(false, Key, _, _, _, _, _, _) ->
+ incr(Key, available),
+ {error, reply_data}.
+
+cast_rc(ok = Ok, _, _) ->
+ Ok;
+cast_rc({error, Reason}, Key, ActionRequests) ->
+ incr(Key, available),
+ gcpLib:error_report(?MODULE, send_request, [ActionRequests],
+ "send failed",
+ Reason),
+ {error, {encode, Reason}}.
+
+%%--------------------------------------------------------------------
+%% Func: prio/1
+%% Returns: The priority of the request
+%%--------------------------------------------------------------------
+
+prio([ActionRequest | _]) ->
+ #'ActionRequest'{contextId = ContextId,
+ contextRequest = ContextRequest}
+ = ActionRequest,
+ prio(ContextId, ContextRequest).
+
+prio(?megaco_choose_context_id, #'ContextRequest'{priority = Prio})
+ when integer(Prio) ->
+ Prio;
+prio(_, _) ->
+ ?PRIO_INFINITY.
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/qlc_error.erl b/lib/dialyzer/test/user_tests_SUITE_data/src/qlc_error.erl
new file mode 100644
index 0000000000..a6865c4562
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/qlc_error.erl
@@ -0,0 +1,15 @@
+%% -*- erlang-indent-level: 2 -*-
+%% $Id: qlc_error.erl,v 1.1 2008/12/17 09:53:52 mikpe Exp $
+
+%% @author Daniel Luna <[email protected]>
+%% @copyright 2006 Daniel Luna
+%%
+%% @doc
+%%
+
+-module(qlc_error).
+-export([fix/0]).
+-include_lib("stdlib/include/qlc.hrl").
+
+fix() ->
+ qlc:eval(qlc:q([I || I <- []])).
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/spvcOrig.erl b/lib/dialyzer/test/user_tests_SUITE_data/src/spvcOrig.erl
new file mode 100644
index 0000000000..70a3c4c7e2
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/spvcOrig.erl
@@ -0,0 +1,3523 @@
+%%%=======================================================================
+%%%
+%%% Test from Mats Cronqvist <[email protected]>. The
+%%% analysis crasched due to the handling of tuples-as-funs in
+%%% hipe_icode_type.erl, and it also exposed a bug when a control flow
+%%% path is first analyzed and then shown to be infeasible.
+%%%
+
+-file("./spvcOrig.erl", 1).
+
+-module(spvcOrig).
+
+-author(qamarma).
+
+-id('3/190 55-CNA 121 64').
+
+-vsn('/main/Inc4/R2A/R4A/R6A/R7A/R7D/R8B/R10A/R11A/2').
+
+-date('2004-10-26').
+
+-export([gen_set/3,gen_set/4,connect/3,release_comp_nu/3,release_nu/3,timeout/2,restart_spvc/1,restart_multi_spvcs/1,forced_release/1,error_handler/3,get_backoff_table/2,timeout_event/1]).
+
+-export([release_incumbent/2,switch_over/2]).
+
+-export([call_failure/1,get_backoff_table/2]).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/pchTables.hrl", 1).
+
+-hrl_id('2/190 55-CNA 121 08').
+
+-hrl_vsn('/main/Inc3/Inc4/R2A/R3A/R3B/R5A/R6A/R7A/R7D/R8B/13').
+
+-hrl_date('2003-01-24').
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../derived_hrl/mib/AXD301-PCH-MIB.hrl", 1).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/pchTables.hrl", 58).
+
+-record(pchVp, {vplEntry,
+ vplLastChange,
+ vplReceiveTrafficDescrIndex = 0,
+ vplTransmitTrafficDescrIndex = 0,
+ vplCcIdentifier,
+ vplConnId,
+ vplMpId,
+ vplLeafId,
+ vplChargingIndicator = 1,
+ vplRemoteChargingInd = 1,
+ vplChargablePartyIdentifier,
+ vplSegmentEndPoint = 2,
+ vplRowStatus,
+ vplCastType = 1,
+ vplConnKind = 1,
+ vplServiceType = 2,
+ vplEndPointData,
+ vplContinuityCheck = 1,
+ vplUpcNpcMode = 2,
+ vplPreventInbandCc = 1,
+ vplMonAisRdi = 2,
+ vpcAdminStatus = 2,
+ vplSpvcAutoTarget = 2,
+ vplSchedulingFlag = 2,
+ vplApplication,
+ vplRemoteData,
+ vpccAdminStatus = 2,
+ vplContCheckSearch = 1,
+ vplPmSearch = 1,
+ vplLastBuffFlagRead,
+ vplShapingMode = 1,
+ vplGroupShapingId}).
+
+-record(pchVpDb, {vplEntry,
+ vplLastChange,
+ vplReceiveTrafficDescrIndex = 0,
+ vplTransmitTrafficDescrIndex = 0,
+ vplCcIdentifier,
+ vplConnId,
+ vplMpId,
+ vplLeafId,
+ vplAttributes,
+ vplChargablePartyIdentifier,
+ vplRowStatus,
+ vplEndPointData,
+ vplApplication,
+ vplRemoteData,
+ vplLastBuffFlagRead,
+ vplShapingMode,
+ vplGroupShapingId}).
+
+-record(pchVpExt, {vplExtEntry,
+ vplExtReceiveTdIndex,
+ vplExtTransmitTdIndex,
+ vplExtUserName = [],
+ vplExtProviderName = [],
+ vplExtUserOperator}).
+
+-record(pchVc, {vclEntry,
+ vclLastChange,
+ vclReceiveTrafficDescrIndex = 0,
+ vclTransmitTrafficDescrIndex = 0,
+ vclCcIdentifier,
+ vclConnId,
+ vclMpId,
+ vclLeafId,
+ vclChargingIndicator = 1,
+ vclRemoteChargingInd = 1,
+ vclChargablePartyIdentifier,
+ vclPacketDiscard = 2,
+ vclSegmentEndPoint = 2,
+ vclRowStatus,
+ vclCastType = 1,
+ vclConnKind = 1,
+ vclContinuityCheck = 1,
+ vclUpcNpcMode = 2,
+ vclEndPointData,
+ vclPreventInbandCc = 1,
+ vclMonAisRdi = 2,
+ vclSpvcAutoTarget = 2,
+ vclSchedulingFlag = 2,
+ vclApplication,
+ vclRemoteData,
+ vcccAdminStatus = 2,
+ vclContCheckSearch = 1,
+ vclPmSearch = 1,
+ vclLastBuffFlagRead,
+ vclChargingIfChanid,
+ vclShapingMode = 1}).
+
+-record(pchVcDb, {vclEntry,
+ vclLastChange,
+ vclReceiveTrafficDescrIndex = 0,
+ vclTransmitTrafficDescrIndex = 0,
+ vclCcIdentifier,
+ vclConnId,
+ vclMpId,
+ vclLeafId,
+ vclAttributes,
+ vclChargablePartyIdentifier,
+ vclRowStatus,
+ vclEndPointData,
+ vclApplication,
+ vclRemoteData,
+ vclLastBuffFlagRead,
+ vclChargingIfChanid,
+ vclShapingMode}).
+
+-record(pchAtd, {tdIndex,
+ tdType,
+ tdParam1 = 0,
+ tdParam2 = 0,
+ tdParam3 = 0,
+ tdParam4 = 0,
+ tdParam5 = 0,
+ tdTrafficQoSClass = 0,
+ tdRowStatus = 1,
+ tdServiceCategory = 6,
+ tdVcCapability = 1,
+ tdName = [],
+ tdUserCounter = 0,
+ tdUser = []}).
+
+-record(pchAbr, {abrIndex,
+ abrIcr,
+ abrTbe = 16277215,
+ abrFrtt = 0,
+ abrRdf = 11,
+ abrRif = 11,
+ abrNrm = 4,
+ abrTrm = 7,
+ abrCdf = 3,
+ abrAdtf = 50,
+ abrRowStatus = 1}).
+
+-record(pchIndexNext, {key,
+ tdIndexNext,
+ vpccIndexNext,
+ vcccIndexNext,
+ scheduledVpCcIndexNext,
+ scheduledVcCcIndexNext}).
+
+-record(pchSchedVpCc, {schedVpCcIndex,
+ schedVpCcTarget,
+ schedVpCcReceiveTdIndex,
+ schedVpCcTransmitTdIndex,
+ schedVpCcOpTime,
+ schedVpCcOpInd,
+ schedVpCcOpStatus,
+ schedVpCcTimerRef,
+ schedVpCcRowStatus,
+ schedVpCcErrorCode,
+ schedVpCcUserName = [],
+ schedVpCcProviderName = []}).
+
+-record(pchVpCc, {vpccId,
+ vpccUserName = [],
+ vpccAdminStatus,
+ vpccApplication,
+ vpccProviderName = []}).
+
+-record(pchSchedVcCc, {schedVcCcIndex,
+ schedVcCcTarget,
+ schedVcCcReceiveTdIndex,
+ schedVcCcTransmitTdIndex,
+ schedVcCcOpTime,
+ schedVcCcOpInd,
+ schedVcCcOpStatus,
+ schedVcCcTimerRef,
+ schedVcCcRowStatus,
+ schedVcCcErrorCode,
+ schedVcCcUserName = [],
+ schedVcCcProviderName = []}).
+
+-record(pchVcCc, {vcccId,
+ vcccUserName = [],
+ vcccAdminStatus,
+ vcccApplication,
+ vcccProviderName = []}).
+
+-record(pchSigChannels, {et_entry,
+ cp_entry,
+ sb_cp_entry,
+ membership,
+ status,
+ sb_status,
+ application = {0,[]}}).
+
+-record(pchSigChannelExt, {et_entry,
+ user_name,
+ provider_name}).
+
+-record(pchApplication, {key,
+ application,
+ rights}).
+
+-record(pchCurrAlarm, {key,
+ type_of_fault,
+ fault_id}).
+
+-record(pchIfAddress, {ifAddressEntry,
+ ifAddressRowStatus}).
+
+-record(pchAddressToIf, {address,
+ if_index}).
+
+-record(pchPreferences, {key,
+ if_format}).
+
+-record(pchSigChannelCallback, {key,
+ callback,
+ function,
+ args,
+ data}).
+
+-record(pchTermHcId, {hcId,
+ vclEntry}).
+
+-record(pchChg, {chgEntry,
+ chgStatus}).
+
+-record(pchCommState, {key,
+ ccid,
+ request,
+ low_cp_state,
+ high_cp_state,
+ et_side,
+ application,
+ data,
+ timestamp,
+ timer_id,
+ callback}).
+
+-record(pchBufferedCmd, {key,
+ resource,
+ module,
+ function,
+ arguments,
+ data}).
+
+-record(pchAnswerCh, {conn_id,
+ chg_data,
+ call_back_cp,
+ old_rtd,
+ old_ttd,
+ old_EpData,
+ action,
+ resource,
+ data,
+ fail_cause}).
+
+-record(pchAnswerOm, {conn_id}).
+
+-record(ccPch, {rowInd,
+ admState = 2}).
+
+-record(pchIf, {ilmiVpi = 0,
+ ilmiVci = 0,
+ ilmiS = 1,
+ ilmiT = 5,
+ ilmiK = 4,
+ neighborIfName = [],
+ neighborIpAddr = [0,0,0,0],
+ maxVciSvc,
+ overbookingFactor = {0,0},
+ shapingMode = 0,
+ maxVpiSvc,
+ cdvtMultFactor = 100,
+ scBandwidth1 = 0,
+ scBandwidth2 = 0,
+ scBandwidth3 = 0,
+ scBandwidth4 = 0}).
+
+-record(pchMpTemp, {key,
+ data}).
+
+-record(pchLatestErrorCode, {key,
+ errorCode}).
+
+-record(pchRangeTable, {node,
+ tdIndexRange,
+ vpccIndexRange,
+ vcccIndexRange}).
+
+-record(pchIndexBitmaps, {key,
+ available,
+ bitmap}).
+
+-record(pchLinkState, {key,
+ op_state,
+ last_change}).
+
+-record(pchFailedVpl, {vplEntry,
+ vplLastChange}).
+
+-record(pchFailedVcl, {vclEntry,
+ vclLastChange}).
+
+-record(pchStatCounters, {key,
+ ingress,
+ egress}).
+
+-record(pchEtStatTable, {index,
+ value = 0}).
+
+-record(pchAuditResult, {key,
+ passed,
+ not_passed,
+ sizes,
+ obj_keys}).
+
+-record(pch_fault_reqc, {fault_type,
+ fault_location}).
+
+-record(pch_cid, {conn_id,
+ mp_id,
+ leaf_id}).
+
+-file("./spvcOrig.erl", 207).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/pchI.hrl", 1).
+
+-hrl_id('52/190 55-CNA 121 08 Ux').
+
+-hrl_vsn('/main/R6A/R7A/R7D/R8B/3').
+
+-hrl_date('2002-10-14').
+
+-hrl_author(uabdomo).
+
+-record(pch_vc_rec, {ifIndex,
+ vpi,
+ vci,
+ application}).
+
+-record(pch_vp_rec, {ifIndex,
+ vpi}).
+
+-record(pch_td_index, {rtd_index,
+ ttd_index}).
+
+-record(pch_td, {service_cat,
+ pcr,
+ scr,
+ mbs,
+ mcr,
+ cdvt,
+ tagging,
+ clp_significance}).
+
+-record(pch_call_back_req, {module,
+ function,
+ user_data}).
+
+-record(pch_chg_rec, {chg_type,
+ chg_interface,
+ chg_chan_id,
+ chg_party_name}).
+
+-record(pch_polic_rec, {policing,
+ packet_discard}).
+
+-record(pch_user_name_rec, {user_name}).
+
+-record(pch_shaping_rec, {shaping}).
+
+-record(pch_audit_callback, {mod,
+ arg}).
+
+-file("./spvcOrig.erl", 208).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/plc.hrl", 1).
+
+-hrl_id('12/190 55-CNA 121 45 Ux').
+
+-hrl_vsn('/main/R6A/R6B/R7A/R7D/R8B/R9A/R11A/4').
+
+-hrl_date('2004-12-07').
+
+-hrl_author(ethrba).
+
+-record(plcQueues, {name,
+ type,
+ weight,
+ maxlength,
+ owner}).
+
+-record(plcSettings, {flag,
+ value}).
+
+-record(plcAlarm, {flag,
+ value}).
+
+-file("./spvcOrig.erl", 209).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/spvcTables.hrl", 1).
+
+-hrl_id('10/190 55-CNA 121 64').
+
+-hrl_vsn('/main/Inc4/R2A/R3A/R3B/R5A/R6A/R7A/R7D/R8B/4').
+
+-hrl_date('2003-02-12').
+
+-hrl_author(etxovp).
+
+-record(spvcVpc, {spvcVpcEntry,
+ spvcVpcTargetAddress,
+ spvcVpcTargetSelectType,
+ spvcVpcTargetVpi,
+ spvcVpcLastReleaseCause,
+ spvcVpcLastReleaseDiagnostic,
+ spvcVpcRetryInterval = 1000,
+ spvcVpcRetryTimer = 0,
+ spvcVpcRetryThreshold = 1,
+ spvcVpcRetryFailures = 0,
+ spvcVpcRetryLimit = 15,
+ spvcVpcRowStatus,
+ spvcVpcUserName = [],
+ spvcVpcProviderName = [],
+ currentState,
+ crankBackCounter = 0,
+ spvcVpcApplication,
+ spvcRerCap = false,
+ spvcRerStatus = false}).
+
+-record(spvcVpcOpState, {state,
+ timeOfChange}).
+
+-record(spvcVpcPerm, {spvcVpcEntry,
+ spvcVpcTargetAddress,
+ spvcVpcTargetSelectType,
+ spvcVpcTargetVpi,
+ spvcVpcRetryInterval = 1000,
+ spvcVpcRetryThreshold = 1,
+ spvcVpcRetryLimit = 15,
+ spvcVpcRowStatus,
+ spvcVpcUserName,
+ spvcVpcProviderName,
+ spvcVpcApplication}).
+
+-record(spvcVpcDyn, {spvcVpcEntry,
+ spvcVpcLastReleaseCause,
+ spvcVpcLastReleaseDiagnostic,
+ spvcVpcRetryTimer = 0,
+ spvcVpcRetryFailures = 0,
+ currentState,
+ crankBackCounter = 0}).
+
+-record(spvcVcc, {spvcVccEntry,
+ spvcVccTargetAddress,
+ spvcVccTargetSelectType,
+ spvcVccTargetVpi,
+ spvcVccTargetVci,
+ spvcVccLastReleaseCause,
+ spvcVccLastReleaseDiagnostic,
+ spvcVccRetryInterval = 1000,
+ spvcVccRetryTimer = 0,
+ spvcVccRetryThreshold = 1,
+ spvcVccRetryFailures = 0,
+ spvcVccRetryLimit = 15,
+ spvcVccRowStatus,
+ spvcVccUserName = [],
+ spvcVccProviderName = [],
+ currentState,
+ crankBackCounter = 0,
+ spvcVccTargetDlci,
+ spvcVccTargetType,
+ spvcVccApplication,
+ spvcVccFrKey,
+ spvcVccTranslationMode,
+ spvcRerCap = false,
+ spvcRerStatus = false}).
+
+-record(spvcVccOpState, {state,
+ timeOfChange}).
+
+-record(spvcVccPerm, {spvcVccEntry,
+ spvcVccTargetAddress,
+ spvcVccTargetSelectType,
+ spvcVccTargetVpi,
+ spvcVccTargetVci,
+ spvcVccRetryInterval = 1000,
+ spvcVccRetryThreshold = 1,
+ spvcVccRetryLimit = 15,
+ spvcVccRowStatus,
+ spvcVccUserName,
+ spvcVccProviderName,
+ spvcVccTargetDlci,
+ spvcVccTargetType,
+ spvcVccApplication,
+ spvcVccFrKey,
+ spvcVccTranslationMode = 2}).
+
+-record(spvcVccDyn, {spvcVccEntry,
+ spvcVccLastReleaseCause,
+ spvcVccLastReleaseDiagnostic,
+ spvcVccRetryTimer = 0,
+ spvcVccRetryFailures = 0,
+ currentState,
+ crankBackCounter = 0}).
+
+-record(spvcFailures, {dummy_key,
+ spvcCallFailuresTrapEnable = 2,
+ spvcNotificationInterval = 30,
+ backoff_interval = 0.100000,
+ delay_factor = 2,
+ max_delay = 200000}).
+
+-record(spvcCounters, {key,
+ value}).
+
+-record(spvcEventIndicator, {dummy_key,
+ spvcTimerInd = 2,
+ spvcSendEventInd = 2}).
+
+-record(spvcIndexNext, {dummy_key,
+ schedVccIndexNext = 1,
+ schedVpcIndexNext = 1}).
+
+-record(spvcHcIdToTp, {hcId,
+ tpEntry}).
+
+-record(spvcTpToHcId, {tpEntry,
+ hcId,
+ orig_number,
+ orig_vpi,
+ orig_vci,
+ orig_dlci,
+ frKey}).
+
+-record(spvcSchedVpc, {schedVpcIndex,
+ schedVpcSource,
+ schedVpcTargetAddr,
+ schedVpcTargetSelType,
+ schedVpcTargetVpi,
+ schedVpcRetryInt,
+ schedVpcRetryThres,
+ schedVpcRetryLimit,
+ schedVpcOpTime,
+ schedVpcOpInd,
+ schedVpcOpStatus,
+ schedVpcTimerRef,
+ schedVpcRowStatus,
+ schedVpcUserName,
+ schedVpcProviderName,
+ schedVpcFaultCause,
+ schedVpcRerCap = false}).
+
+-record(spvcSchedVcc, {schedVccIndex,
+ schedVccSource,
+ schedVccTargetAddr,
+ schedVccTargetSelType,
+ schedVccTargetVpi,
+ schedVccTargetVci,
+ schedVccRetryInt,
+ schedVccRetryThres,
+ schedVccRetryLimit,
+ schedVccOpTime,
+ schedVccOpInd,
+ schedVccOpStatus,
+ schedVccTimerRef,
+ schedVccRowStatus,
+ schedVccUserName,
+ schedVccProviderName,
+ schedVccFaultCause,
+ schedVccRerCap = false}).
+
+-record(spvcCurrAlarm, {key,
+ fault_id,
+ data}).
+
+-record(spvcChg, {key,
+ data}).
+
+-record(spvcBackoff, {key,
+ delay_time,
+ flag}).
+
+-record(spvcAutoVp, {entry,
+ lastChange,
+ receiveTrafficDescrIndex,
+ transmitTrafficDescrIndex,
+ ccIdentifier,
+ connId,
+ mpId,
+ leafId,
+ chargingIndicator = 1,
+ remoteChargingInd = 1,
+ chargablePartyIdentifier,
+ segmentEndPoint = 2,
+ rowStatus,
+ castType = 1,
+ connKind,
+ serviceType = 2,
+ endPointData,
+ continuityCheck = 1,
+ upcNpcMode = 2,
+ preventInbandCc = 1,
+ monAisRdi = 2,
+ adminStatus,
+ autoTarget = 1,
+ schedulingFlag = 2,
+ application = [],
+ remoteData,
+ vpccAdminStatus = 2,
+ contCheckSearch = 1,
+ pmSearch = 1,
+ lastBuffFlagRead,
+ shapingMode = 1,
+ groupShapingId}).
+
+-record(spvcAutoVc, {entry,
+ lastChange,
+ receiveTrafficDescrIndex,
+ transmitTrafficDescrIndex,
+ ccIdentifier,
+ connId,
+ mpId,
+ leafId,
+ chargingIndicator = 1,
+ remoteChargingInd = 1,
+ chargablePartyIdentifier,
+ packetDiscard = 2,
+ segmentEndPoint = 2,
+ rowStatus,
+ castType = 1,
+ connKind,
+ continuityCheck = 1,
+ upcNpcMode = 2,
+ endPointData,
+ preventInbandCc = 1,
+ monAisRdi = 2,
+ autoTarget = 1,
+ schedulingFlag = 2,
+ application = [],
+ remoteData,
+ vcccAdminStatus = 2,
+ contCheckSearch = 1,
+ pmSearch = 1,
+ lastBuffFlagRead,
+ chargingIfChanid,
+ shapingMode = 1}).
+
+-record(spvcAutoAtd, {index,
+ type,
+ param1 = 0,
+ param2 = 0,
+ param3 = 0,
+ param4 = 0,
+ param5 = 0,
+ trafficQoSClass = 0,
+ rowStatus = 1,
+ serviceCategory = 6,
+ vcCapability = 1,
+ name = [],
+ userCounter = 0}).
+
+-record(spvcAutoAbr, {index,
+ icr,
+ tbe = 16277215,
+ frtt = 0,
+ rdf = 11,
+ rif = 11,
+ nrm = 4,
+ trm = 7,
+ cdf = 3,
+ adtf = 50,
+ rowStatus = 1}).
+
+-record(spvcLatestErrorCode, {key,
+ errorCode}).
+
+-record(spvcVcDyn, {vclEntry,
+ vclCcIdentifier,
+ vclConnId,
+ vclMpId,
+ vclLeafId}).
+
+-record(spvcVpDyn, {vplEntry,
+ vplCcIdentifier,
+ vplConnId,
+ vplMpId,
+ vplLeafId}).
+
+-record(spvcObj, {spvcEntry,
+ spvcTargetAddress,
+ spvcTargetSelectType,
+ spvcTargetVpi,
+ spvcTargetVci,
+ spvcLastReleaseCause,
+ spvcLastReleaseDiagnostic,
+ spvcRetryInterval = 1000,
+ spvcRetryTimer = 0,
+ spvcRetryThreshold = 1,
+ spvcRetryFailures = 0,
+ spvcRetryLimit = 15,
+ spvcRowStatus,
+ spvcUserName,
+ spvcProviderName,
+ currentState,
+ spvcTargetDlci,
+ spvcTargetType,
+ spvcApplication,
+ spvcFrKey,
+ spvcVccTranslationMode = 2,
+ spvcRerCap = false,
+ spvcRerStatus = false}).
+
+-record(spvcTargetVc, {entry,
+ userName = [],
+ providerName = [],
+ opState,
+ rowStatus}).
+
+-record(spvcTargetVp, {entry,
+ userName = [],
+ providerName = [],
+ opState,
+ rowStatus}).
+
+-record(spvcReestablishTimer, {time,
+ timer_id,
+ module,
+ function,
+ args}).
+
+-record(spvcRerVp, {entry,
+ rerCap,
+ rerData}).
+
+-record(spvcRerVc, {entry,
+ rerCap,
+ rerData}).
+
+-record(spvcHcEtStat, {key,
+ counter = 0}).
+
+-record(spvcSaEtStat, {key,
+ counter = 0}).
+
+-file("./spvcOrig.erl", 210).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/spvcDefines.hrl", 1).
+
+-hrl_id('41/190 55-CNA 121 64 Ux').
+
+-hrl_vsn('/main/R6A/R7A/R7D/R8B/3').
+
+-hrl_date('2003-02-21').
+
+-hrl_author(etxhebl).
+
+-file("./spvcOrig.erl", 211).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/spvcFr.hrl", 1).
+
+-hrl_id('48/190 55-CNA 121 64 Ux').
+
+-hrl_vsn('/main/R7A/R7D/2').
+
+-hrl_date('2001-12-06').
+
+-hrl_author(etxhtb).
+
+-record(spvcFr, {spvcFrEntry,
+ spvcFrAtmEntry,
+ spvcFrTargetAddress,
+ spvcFrTargetSelectType,
+ spvcFrTargetIdentifier,
+ spvcFrTargetVpi,
+ spvcFrTargetVci,
+ spvcFrAtmTranslation,
+ spvcFrLastReleaseCause,
+ spvcFrLastReleaseDiagnostic,
+ spvcFrAdminStatus,
+ spvcFrRetryInterval = 1000,
+ spvcFrRetryTimer = 0,
+ spvcFrRetryThreshold = 1,
+ spvcFrRetryFailures = 0,
+ spvcFrRetryLimit = 15,
+ spvcFrRowStatus,
+ spvcFrUserName,
+ spvcFrProviderName,
+ currentState}).
+
+-record(spvcFrPerm, {spvcFrEntry,
+ spvcFrAtmEntry,
+ spvcFrAtmTranslation,
+ spvcFrAdminStatus,
+ spvcFrConnect}).
+
+-record(spvcFrAddress, {addressEntry,
+ addressRowStatus}).
+
+-record(spvcFrAddressToIf, {address,
+ if_index}).
+
+-record(fr_end_point, {ifIndex,
+ dlci}).
+
+-record(fr_atm_translation, {routedIp = off,
+ routedOsi = off,
+ otherRouted = off,
+ arpTranslation = off}).
+
+-record(link_layer_core_parameters, {outgoing_max_ifs,
+ incoming_max_ifs}).
+
+-record(priority_and_service_class, {outgoing_transfer_priority,
+ incoming_transfer_priority,
+ outgoing_discard_priority,
+ incoming_discard_priority}).
+
+-file("./spvcOrig.erl", 212).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../derived_hrl/mib/AXD301-PCH-MIB.hrl", 1).
+
+-file("./spvcOrig.erl", 213).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../derived_hrl/mib/AXD301-SPVC-MIB.hrl", 1).
+
+-file("./spvcOrig.erl", 214).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../derived_hrl/mib/AXD301-FRSPVC-MIB.hrl", 1).
+
+-file("./spvcOrig.erl", 215).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/sysDefines.hrl", 1).
+
+-hrl_id('3/190 55-CNA 121 70').
+
+-hrl_vsn('/main/Inc3/Inc4/Inc5/R3B/R4A/R5B/R6A/R7A/R8B/2').
+
+-hrl_date('2002-06-07').
+
+-hrl_author(etxjotj).
+
+-file("./spvcOrig.erl", 216).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/hciMsg.hrl", 1).
+
+-hrl_id('4/190 55-CNA 121 159 Ux').
+
+-hrl_vsn('/main/R7A/R8B/10').
+
+-hrl_date('2003-02-21').
+
+-hrl_author(etxmexa).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/hciComp.hrl", 1).
+
+-hrl_id('3/190 55-CNA 121 159 Ux').
+
+-hrl_vsn('/main/R7A/1').
+
+-hrl_date('00-03-22').
+
+-hrl_author(etxmexa).
+
+-record(hci_comp_info, {required_FC = 0,
+ desired_FC = 0}).
+
+-record(hci_comp_res, {not_supported_required_FCs,
+ not_supported_desired_FCs,
+ all_supported_FCs}).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/hciMsg.hrl", 14).
+
+-record(hci_add_party, {hci_cpn,
+ hci_aal,
+ hci_bhli,
+ hci_blli,
+ hci_blli_bici,
+ hci_bsco,
+ hci_epr,
+ hci_e2etd,
+ hci_noti,
+ hci_cpsa,
+ hci_clpn,
+ hci_clpsa,
+ hci_cpn_soft,
+ hci_clpn_soft,
+ hci_geidt_list = [],
+ hci_dtl_bin_list = [],
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_add_party_ack, {hci_epr,
+ hci_aal,
+ hci_blli,
+ hci_blli_bici,
+ hci_e2etd,
+ hci_noti,
+ hci_cpn_soft,
+ hci_cnosa,
+ hci_cno,
+ hci_geidt_list = [],
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_add_party_rej, {hci_cause,
+ hci_epr,
+ hci_geidt_list = [],
+ hci_cb,
+ hci_pa_list = [],
+ hci_internal_rel_info,
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_alerting, {hci_mci,
+ hci_unrps,
+ hci_cdpi,
+ hci_epr,
+ hci_prog_list = [],
+ hci_nbc,
+ hci_nbhlc,
+ hci_noti,
+ hci_geidt_list = [],
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_ssie,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_b_resources, {hci_rem_dataB,
+ hci_vpiB,
+ hci_vciB,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_connect, {hci_mci,
+ hci_unrps,
+ hci_aal,
+ hci_blli,
+ hci_blli_bici,
+ hci_epr,
+ hci_atd,
+ hci_e2etd,
+ hci_noti,
+ hci_abrs,
+ hci_abra,
+ hci_nbc,
+ hci_nbhlc,
+ hci_nbllc,
+ hci_prog_list = [],
+ hci_geidt_list = [],
+ hci_eqos,
+ hci_cpn_soft,
+ hci_cnosa,
+ hci_cno,
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_rem_dataB,
+ hci_con_dir = both,
+ hci_ssie,
+ hci_rer_services,
+ hci_rer,
+ hci_opt_traf,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_drop_party, {hci_cause,
+ hci_epr,
+ hci_noti,
+ hci_geidt_list = [],
+ hci_pa_list = [],
+ hci_internal_rel_info,
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_local_connect, {hci_rem_data,
+ hci_con_dir,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_local_connected, {hci_rem_data,
+ hci_con_dir,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_local_disconnect, {hci_discon_dir,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_local_disconnected, {hci_data,
+ hci_prot_comp}).
+
+-record(hci_notify, {hci_epr,
+ hci_noti,
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_party_alerting, {hci_epr,
+ hci_noti,
+ hci_geidt_list = [],
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_progress, {hci_mci,
+ hci_unrps,
+ hci_cdpi,
+ hci_prog_list = [],
+ hci_nbc,
+ hci_nbhlc,
+ hci_noti,
+ hci_pa_list = [],
+ hci_gat_list = [],
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_release, {hci_mci,
+ hci_unrps,
+ hci_cause_list = [],
+ hci_noti,
+ hci_prog_list = [],
+ hci_geidt_list = [],
+ hci_cb,
+ hci_pa_list = [],
+ hci_internal_rel_info,
+ hci_gat_list = [],
+ hci_ssie,
+ hci_rer_cause,
+ hci_data,
+ hci_prot_comp,
+ hci_internal_dbg_cc,
+ hci_internal_dbg_l3}).
+
+-record(hci_setup, {hci_mci,
+ hci_unrps,
+ hci_atd,
+ hci_bbc,
+ hci_qos,
+ hci_cpn,
+ hci_aal,
+ hci_bhli,
+ hci_blli_brep,
+ hci_blli_bici,
+ hci_bsco,
+ hci_epr,
+ hci_lpt,
+ hci_e2etd,
+ hci_noti,
+ hci_abrs,
+ hci_abra,
+ hci_prog_list = [],
+ hci_eqos,
+ hci_cpsa_list = [],
+ hci_clpn,
+ hci_bici_clpn,
+ hci_clpsa_list = [],
+ hci_cgpc,
+ hci_nbc_brep,
+ hci_nbhlc_list = [],
+ hci_nbllc_brep,
+ hci_conss,
+ hci_geidt_list = [],
+ hci_cpn_soft,
+ hci_clpn_soft,
+ hci_dtl_bin_list = [],
+ hci_pa_list = [],
+ hci_ncci,
+ hci_routing_address,
+ hci_protocol_internal_info,
+ hci_gat_list = [],
+ hci_con_dir = both,
+ hci_ssie,
+ hci_rer_services,
+ hci_rer,
+ hci_opt_traf,
+ hci_data_setup,
+ hci_prot_comp}).
+
+-record(hci_setup_ack, {hci_assign,
+ hci_rem_dataB,
+ hci_con_dir = both,
+ hci_vpiB,
+ hci_vciB,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_status, {hci_state,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_status_enq, {hci_state,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_remote_data, {hci_prot_type,
+ hci_data,
+ hci_dummy1,
+ hci_dummy2}).
+
+-record(hci_unrec, {hci_mci,
+ hci_head,
+ hci_binary,
+ hci_data,
+ hci_prot_comp}).
+
+-record(hci_atd, {hci_pci,
+ hci_apci,
+ hci_fwd_pcr_clp_0,
+ hci_bwd_pcr_clp_0,
+ hci_fwd_pcr_clp_0_1,
+ hci_bwd_pcr_clp_0_1,
+ hci_fwd_scr_clp_0,
+ hci_bwd_scr_clp_0,
+ hci_fwd_scr_clp_0_1,
+ hci_bwd_scr_clp_0_1,
+ hci_fwd_mbs_clp_0,
+ hci_bwd_mbs_clp_0,
+ hci_fwd_mbs_clp_0_1,
+ hci_bwd_mbs_clp_0_1,
+ hci_best_effort_ind = 0,
+ hci_fwd_frame_discard = 0,
+ hci_bwd_frame_discard = 0,
+ hci_tagging_bwd = 0,
+ hci_tagging_fwd = 0,
+ hci_fwd_abr_mcr,
+ hci_bwd_abr_mcr,
+ hci_binary}).
+
+-record(hci_bbc, {hci_pci,
+ hci_bearer_class,
+ hci_atm_transfer_capability,
+ hci_user_plane_connection_configuration,
+ hci_susceptibility_to_clipping,
+ hci_binary}).
+
+-record(hci_cause, {hci_pci,
+ hci_location,
+ hci_cause_value,
+ hci_diagnostics_list = [],
+ hci_binary}).
+
+-record(hci_cpn, {hci_pci,
+ hci_type_of_number,
+ hci_intern_netw_numb_indic,
+ hci_numbering_plan_indicator,
+ hci_number_digits,
+ hci_orig_native = false}).
+
+-record(hci_clpn, {hci_pci,
+ hci_type_of_number,
+ hci_numbering_plan_indicator,
+ hci_presentation_indicator,
+ hci_screening_indicator,
+ hci_number_digits,
+ hci_incomplete_indicator = 0,
+ hci_binary}).
+
+-record(hci_cno, {hci_type_of_number,
+ hci_numbering_plan_indicator,
+ hci_presentation_indicator,
+ hci_screening_indicator,
+ hci_number_digits,
+ hci_binary}).
+
+-record(hci_cnosa, {hci_binary}).
+
+-record(hci_cpn_soft, {hci_select_type,
+ hci_soft_vpi,
+ hci_soft_vci,
+ hci_soft_dlci,
+ hci_binary}).
+
+-record(hci_clpn_soft, {hci_soft_vpi,
+ hci_soft_vci,
+ hci_soft_dlci,
+ hci_binary}).
+
+-record(hci_rer_services, {hci_inter_req_hard,
+ hci_inter_cap_hard,
+ hci_intra_req_soft,
+ hci_intra_req_hard,
+ hci_intra_cap_asym,
+ hci_intra_cap_sym,
+ hci_intra_cap_hard,
+ hci_binary}).
+
+-record(hci_rer, {hci_func_addr,
+ hci_endpoint_key,
+ hci_switchover,
+ hci_incarnation,
+ hci_pnni_cumul_fw_max_cell_td,
+ hci_cumul_fw_p2p_cdv,
+ hci_cumul_bw_p2p_cdv,
+ hci_binary}).
+
+-record(hci_rer_cause, {hci_rer_rel_cause,
+ hci_binary}).
+
+-record(hci_opt_traf, {hci_origin,
+ hci_cumul_fw_aw,
+ hci_cumul_bw_aw,
+ hci_binary}).
+
+-record(hci_qos, {hci_pci,
+ hci_qos_class_fwd,
+ hci_qos_class_bwd,
+ hci_binary}).
+
+-record(hci_aal, {hci_pci,
+ hci_binary}).
+
+-record(hci_bhli, {hci_pci,
+ hci_binary}).
+
+-record(hci_blli_brep, {hci_brep,
+ hci_blli_list = []}).
+
+-record(hci_blli, {hci_binary}).
+
+-record(hci_blli_bici, {hci_repeated,
+ hci_priority,
+ hci_pci,
+ hci_binary}).
+
+-record(hci_cpsa, {hci_pci,
+ hci_binary}).
+
+-record(hci_clpsa, {hci_pci,
+ hci_binary}).
+
+-record(hci_gat, {hci_binary}).
+
+-record(hci_epr, {hci_epr_type,
+ hci_epr_value,
+ hci_epr_flag,
+ hci_binary}).
+
+-record(hci_eqos, {hci_origin,
+ hci_acc_fwd_p2p_cdv,
+ hci_acc_bwd_p2p_cdv,
+ hci_cum_fwd_p2p_cdv,
+ hci_cum_bwd_p2p_cdv,
+ hci_acc_fwd_clr,
+ hci_acc_bwd_clr,
+ hci_binary}).
+
+-record(hci_brep, {hci_binary}).
+
+-record(hci_bsco, {hci_binary}).
+
+-record(hci_noti, {hci_binary}).
+
+-record(hci_abrs, {hci_fwd_abr_icr,
+ hci_bwd_abr_icr,
+ hci_fwd_abr_tbe,
+ hci_bwd_abr_tbe,
+ hci_cum_rm_fix_round_trip,
+ hci_fwd_rif,
+ hci_bwd_rif,
+ hci_fwd_rdf,
+ hci_bwd_rdf,
+ hci_binary}).
+
+-record(hci_abra, {hci_fwd_nrm,
+ hci_fwd_trm,
+ hci_fwd_cdf,
+ hci_fwd_atdf,
+ hci_bwd_nrm,
+ hci_bwd_trm,
+ hci_bwd_cdf,
+ hci_bwd_atdf,
+ hci_binary}).
+
+-record(hci_prog, {hci_coding_std,
+ hci_location,
+ hci_prog_desc,
+ hci_binary}).
+
+-record(hci_nbc_brep, {hci_brep,
+ hci_nbc_list = []}).
+
+-record(hci_nbc, {hci_binary}).
+
+-record(hci_nbhlc, {hci_binary}).
+
+-record(hci_nbllc_brep, {hci_brep,
+ hci_nbllc_list = []}).
+
+-record(hci_nbllc, {hci_binary}).
+
+-record(hci_geidt, {hci_binary}).
+
+-record(hci_conss, {hci_type_of_conn_scope,
+ hci_conn_scope,
+ hci_binary}).
+
+-record(hci_e2etd, {hci_pci,
+ hci_cumul_td,
+ hci_max_td,
+ hci_pnni_cumul_td,
+ hci_pnni_accept_fwd_max_td,
+ hci_netw_gen}).
+
+-record(hci_cdpi, {hci_pci,
+ hci_cdpci,
+ hci_cdpsi,
+ hci_binary}).
+
+-record(hci_cgpc, {hci_pci,
+ hci_binary}).
+
+-record(hci_lpt, {hci_pci,
+ hci_ptype}).
+
+-record(hci_cb, {hci_cb_level,
+ hci_bl_transit_type,
+ hci_bl_node_id,
+ hci_bl_link_proc_node_id,
+ hci_bl_link_port_id,
+ hci_bl_link_succ_node_id,
+ cause_value,
+ hci_cb_diagnostics,
+ hci_binary}).
+
+-record(hci_pa, {hci_ie_id,
+ hci_coding,
+ hci_action,
+ hci_length,
+ hci_binary,
+ hci_error_type}).
+
+-record(hci_ncci, {hci_pci,
+ hci_ni,
+ hci_point_code,
+ hci_call_id}).
+
+-record(hci_ssie, {hci_ssie_sas = [],
+ hci_binary}).
+
+-record(hci_sas, {hci_sas_vsn,
+ hci_sas_transp_ind,
+ hci_sas_flow_ind,
+ hci_sas_discard,
+ hci_sas_scope,
+ hci_sas_relative_id,
+ hci_binary}).
+
+-record(hci_data, {hci_hcid,
+ hci_sender_ifindex,
+ hci_sender_hcid}).
+
+-record(hci_data_setup, {hci_hcidA,
+ hci_pidA,
+ hci_protA,
+ hci_protB,
+ hci_portB,
+ hci_hcidB,
+ hci_rem_dataA,
+ hci_assign,
+ hci_ifindexB,
+ hci_node_id,
+ hci_succ_node_id,
+ hci_ifindexA,
+ hci_vpiA,
+ hci_vciA,
+ hci_cpA,
+ hci_cpB}).
+
+-record(hci_prot_comp, {hci_requiredFC = 0,
+ hci_desiredFC = 0}).
+
+-file("./spvcOrig.erl", 217).
+
+-file("/export/localhome/locmacr/wrk/axd_r11/ATS_CRA12002/SPVC_CNA12164/src/../../../inc/ccCd.hrl", 1).
+
+-hrl_id('13/190 55-CNA 121 101 Ux').
+
+-hrl_vsn('/main/R6A/R7A/R8A/R8B/8').
+
+-hrl_date('2003-02-21').
+
+-hrl_author(etxmexa).
+
+-record(ccCdRR, {hcid,
+ vpi,
+ vci,
+ ifindexA,
+ call_type,
+ spvc = false,
+ reserve = yes,
+ etA,
+ destdata,
+ leafdata,
+ loopdata,
+ l3,
+ l3_loop,
+ cc}).
+
+-record(ccCdRD, {destid,
+ loopdata,
+ cc}).
+
+-record(ccCdRL, {leafid,
+ protTypeB,
+ loopdata,
+ l3,
+ l3_loop,
+ cc}).
+
+-record(ccCdDD, {hcid,
+ hcidA,
+ vpi,
+ vci,
+ ifindexB,
+ portB,
+ call_type,
+ spvc = false,
+ reserve = yes,
+ protTypeA,
+ etB,
+ leafdata,
+ loopdata,
+ l3,
+ l3_loop,
+ cc}).
+
+-record(ccCdDL, {leafid,
+ loopdata,
+ l3,
+ l3_loop,
+ cc}).
+
+-record(ccRR, {protTypeA,
+ remote_dataA,
+ remote_dataB,
+ chg_counters,
+ sc,
+ chg_decision = on,
+ cc_loop}).
+
+-record(ccRL, {hcidB,
+ charging,
+ cc_loop}).
+
+-record(ccRD, {portB,
+ ifindexB,
+ cpB,
+ vpiB,
+ vciB,
+ cc_loop}).
+
+-record(ccDD, {protTypeB,
+ remote_dataA,
+ remote_dataB,
+ ifindexA,
+ cpA,
+ vpiA,
+ vciA,
+ chg_counters,
+ sc,
+ chg_decision = on,
+ cc_loop}).
+
+-record(ccDL, {cc_loop}).
+
+-record(loopRR, {vpList,
+ nodeid,
+ succ_nodeid,
+ connection_type,
+ policing,
+ delay_contrib,
+ charging = on,
+ prev_routing_data}).
+
+-record(loopRD, {}).
+
+-record(loopRL, {msg_rec,
+ providerName,
+ userName,
+ partyId,
+ serviceIfA,
+ serviceIdA,
+ serviceIfB,
+ serviceIdB,
+ estAw,
+ dtlLevels}).
+
+-record(loopDD, {nodeid,
+ succ_nodeid,
+ vpList,
+ connection_type,
+ policing,
+ assign,
+ delay_contrib,
+ charging = on}).
+
+-record(loopDL, {msg_rec,
+ providerName,
+ userName,
+ partyId,
+ serviceIfA,
+ serviceIdA,
+ serviceIfB,
+ serviceIdB}).
+
+-record(ccLoopRR, {pidB,
+ qos,
+ atd,
+ bbc,
+ cscope,
+ e2etd,
+ eqos,
+ con_state = none,
+ con_order = both,
+ mr_flag,
+ catch_up_id,
+ cpA}).
+
+-record(ccLoopRD, {}).
+
+-record(ccLoopRL, {route,
+ linklist,
+ routelist,
+ failurelist = [],
+ nodeidlist,
+ cb,
+ cpn,
+ dtl,
+ routing_state,
+ assign,
+ timer_counter = 0,
+ timer_ref,
+ status_enq_ind,
+ link_CB,
+ node_CB,
+ pnnir_rlp,
+ pnni_only}).
+
+-record(ccLoopDD, {pidA,
+ con_state = none,
+ con_order = both,
+ mr_flag,
+ catch_up_id,
+ cpB}).
+
+-record(ccLoopDL, {timer_counter = 0,
+ timer_ref,
+ status_enq_ind}).
+
+-file("./spvcOrig.erl", 218).
+
+-file("/export/localhome/locmacr/built/lib/erlang/lib/snmp-4.1.2/include/STANDARD-MIB.hrl", 1).
+
+-file("./spvcOrig.erl", 219).
+
+error_handler({From,Tag},{M,F,Args},EXITReason) ->
+ spvcLib:do_report(sccm,M,F,Args,"",EXITReason).
+
+connect(HcId,Connect,Key) ->
+ debug_disabled,
+ Obj = spvcDataBase:db_read({spvcObj,Key}),
+ orig_state_machine(Obj#spvcObj.currentState,connect_nu,Obj,[HcId,Connect]).
+
+release_nu(HcId,Release,Key) ->
+ debug_disabled,
+ Obj = spvcDataBase:db_read({spvcObj,Key}),
+ spvcDataBase:db_delete({spvcHcIdToTp,HcId}),
+ orig_state_machine(Obj#spvcObj.currentState,release_nu,Obj,[HcId,Release]).
+
+release_comp_nu(HcId,Release_comp,Key) ->
+ debug_disabled,
+ Obj = spvcDataBase:db_read({spvcObj,Key}),
+ spvcDataBase:db_delete({spvcHcIdToTp,HcId}),
+ orig_state_machine(Obj#spvcObj.currentState,release_comp_nu,Obj,[HcId,Release_comp]).
+
+release_incumbent(HcId,Release) ->
+ debug_disabled,
+ release_incumbent2(spvcDataBase:db_read({spvcHcIdToTp,HcId}),Release).
+
+release_incumbent2(SpvcHcIdToTp,Release) ->
+ release_incumbent3(SpvcHcIdToTp#spvcHcIdToTp.tpEntry,Release).
+
+release_incumbent3({orig,If,Vpi,Vci,Leaf},Release) ->
+ release_incumbent4({If,Vpi,Vci,Leaf},Release);
+release_incumbent3({orig,If,Vpi,Leaf},Release) ->
+ release_incumbent4({If,Vpi,Leaf},Release).
+
+release_incumbent4(TpKey,Release) ->
+ Spvc = spvcDataBase:db_read({spvcObj,TpKey}),
+ active = Spvc#spvcObj.currentState,
+ orig_state_machine(active,release_incumbent,Spvc,[Release]).
+
+switch_over(HcId,{If,Vpi,Vci}) ->
+ Key = case {If,Vpi,Vci} of
+ {If_Value,Vpi_Value,Vci_Value} when integer(Vci_Value) ->
+ {If_Value,Vpi_Value,Vci_Value,1};
+ {If_Value,Vpi_Value,_} ->
+ {If_Value,Vpi_Value,1};
+ {If_Value,Vpi_Value} ->
+ {If_Value,Vpi_Value,1}
+ end,
+ Spvc = spvcDataBase:db_read({spvcObj,Key}),
+ do_switch_over(HcId,Spvc);
+switch_over(HcId,{If,Vpi}) ->
+ Key = case {If,Vpi,no_vc} of
+ {If_Value,Vpi_Value,Vci_Value} when integer(Vci_Value) ->
+ {If_Value,Vpi_Value,Vci_Value,1};
+ {If_Value,Vpi_Value,_} ->
+ {If_Value,Vpi_Value,1};
+ {If_Value,Vpi_Value} ->
+ {If_Value,Vpi_Value,1}
+ end,
+ Spvc = spvcDataBase:db_read({spvcObj,Key}),
+ do_switch_over(HcId,Spvc).
+
+do_switch_over(HcId,Spvc) ->
+ State = Spvc#spvcObj.currentState,
+ orig_state_machine(State,switch_over,Spvc,[HcId]).
+
+gen_set(Type,Row,Cols) ->
+ debug_disabled,
+ gen_set(Type,Row,Cols,undefined).
+
+gen_set(Type,Row,Cols,FrKey) ->
+ debug_disabled,
+ case lists:keysearch(case {case Row of
+ {_,_,_,_} ->
+ spvcVcc;
+ {_,_,_} ->
+ spvcVpc;
+ {_,_} ->
+ spvcFr;
+ [_,_,_,_] ->
+ spvcVcc;
+ [_,_,_] ->
+ spvcVpc;
+ [_,_] ->
+ spvcFr
+ end,rowStatus} of
+ {spvcVcc,targetAddress} ->
+ 2;
+ {spvcVcc,selectType} ->
+ 3;
+ {spvcVcc,targetVpi} ->
+ 18;
+ {spvcVcc,targetVci} ->
+ 5;
+ {spvcVcc,releaseCause} ->
+ 6;
+ {spvcVcc,releaseDiagnostic} ->
+ 7;
+ {spvcVcc,retryInterval} ->
+ 10;
+ {spvcVcc,retryTimer} ->
+ 11;
+ {spvcVcc,retryThreshold} ->
+ 12;
+ {spvcVcc,retryFailures} ->
+ 13;
+ {spvcVcc,retryLimit} ->
+ 14;
+ {spvcVcc,rowStatus} ->
+ 15;
+ {spvcVcc,restart} ->
+ 9;
+ {spvcVcc,targetSelectType_any} ->
+ 2;
+ {spvcVcc,targetSelectType_required} ->
+ 1;
+ {spvcVpc,targetAddress} ->
+ 2;
+ {spvcVpc,selectType} ->
+ 3;
+ {spvcVpc,targetVpi} ->
+ 15;
+ {spvcVpc,releaseCause} ->
+ 5;
+ {spvcVpc,releaseDiagnostic} ->
+ 6;
+ {spvcVpc,retryInterval} ->
+ 9;
+ {spvcVpc,retryTimer} ->
+ 10;
+ {spvcVpc,retryThreshold} ->
+ 11;
+ {spvcVpc,retryFailures} ->
+ 12;
+ {spvcVpc,retryLimit} ->
+ 13;
+ {spvcVpc,rowStatus} ->
+ 14;
+ {spvcVpc,restart} ->
+ 8;
+ {spvcVpc,targetSelectType_any} ->
+ 2;
+ {spvcVpc,targetSelectType_required} ->
+ 1;
+ {spvcFr,targetAddress} ->
+ 3;
+ {spvcFr,selectType} ->
+ 5;
+ {spvcFr,identifier} ->
+ 6;
+ {spvcFr,targetVpi} ->
+ 7;
+ {spvcFr,targetVci} ->
+ 8;
+ {spvcFr,translation} ->
+ 9;
+ {spvcFr,releaseCause} ->
+ 10;
+ {spvcFr,releaseDiagnostic} ->
+ 11;
+ {spvcFr,operStatus} ->
+ 12;
+ {spvcFr,adminStatus} ->
+ 13;
+ {spvcFr,restart} ->
+ 14;
+ {spvcFr,retryInterval} ->
+ 15;
+ {spvcFr,retryTimer} ->
+ 16;
+ {spvcFr,retryThreshold} ->
+ 17;
+ {spvcFr,retryFailures} ->
+ 18;
+ {spvcFr,retryLimit} ->
+ 19;
+ {spvcFr,lastChange} ->
+ 20;
+ {spvcFr,rowStatus} ->
+ 21
+ end,1,Cols) of
+ {value,{_,4}} ->
+ debug_disabled,
+ mnesia:dirty_update_counter(spvcHcEtStat,spvcLib:get_board(hd(Row)),1),
+ case get_link_state(case Row of
+ Row when record(Row,spvcObj) ->
+ case Row#spvcObj.spvcEntry of
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value
+ end;
+ Row when record(Row,spvcVcc) ->
+ {If_Value,_,_,_} = Row#spvcVcc.spvcVccEntry,
+ If_Value;
+ Row when record(Row,spvcVpc) ->
+ {If_Value,_,_} = Row#spvcVpc.spvcVpcEntry,
+ If_Value;
+ Row when record(Row,spvcVpcPerm) ->
+ {If_Value,_,_} = Row#spvcVpcPerm.spvcVpcEntry,
+ If_Value;
+ Row when record(Row,spvcVccPerm) ->
+ {If_Value,_,_,_} = Row#spvcVccPerm.spvcVccEntry,
+ If_Value;
+ Row when record(Row,spvcTargetVc) ->
+ {If_Value,_,_} = Row#spvcTargetVc.entry,
+ If_Value;
+ Row when record(Row,spvcTargetVp) ->
+ {If_Value,_} = Row#spvcTargetVp.entry,
+ If_Value;
+ Row when record(Row,pchVc) ->
+ {If_Value,_,_} = Row#pchVc.vclEntry,
+ If_Value;
+ Row when record(Row,pchVp) ->
+ {If_Value,_} = Row#pchVp.vplEntry,
+ If_Value;
+ Row when record(Row,spvcFr) ->
+ {If_Value,_} = Row#spvcFr.spvcFrEntry,
+ If_Value;
+ Row when record(Row,spvcFrPerm) ->
+ {If_Value,_} = Row#spvcFrPerm.spvcFrEntry,
+ If_Value;
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value;
+ {If_Value,_} ->
+ If_Value;
+ [If_Value|_] ->
+ If_Value;
+ _ ->
+ error
+ end) of
+ disabled ->
+ orig_state_machine(null,createAndGo_disabled,[],[Row,Cols,Type,FrKey]);
+ enabled ->
+ orig_state_machine(null,createAndGo_enabled,[],[Row,Cols,Type,FrKey])
+ end;
+ {value,{_,5}} ->
+ debug_disabled,
+ mnesia:dirty_update_counter(spvcHcEtStat,spvcLib:get_board(hd(Row)),1),
+ orig_state_machine(null,createAndWait,[],[Row,Cols,Type,FrKey]);
+ {value,{_,1}} ->
+ debug_disabled,
+ case spvcDataBase:db_read({spvcObj,list_to_tuple(Row)}) of
+ [] ->
+ ok;
+ Spvc ->
+ case get_link_state(case Row of
+ Row when record(Row,spvcObj) ->
+ case Row#spvcObj.spvcEntry of
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value
+ end;
+ Row when record(Row,spvcVcc) ->
+ {If_Value,_,_,_} = Row#spvcVcc.spvcVccEntry,
+ If_Value;
+ Row when record(Row,spvcVpc) ->
+ {If_Value,_,_} = Row#spvcVpc.spvcVpcEntry,
+ If_Value;
+ Row when record(Row,spvcVpcPerm) ->
+ {If_Value,_,_} = Row#spvcVpcPerm.spvcVpcEntry,
+ If_Value;
+ Row when record(Row,spvcVccPerm) ->
+ {If_Value,_,_,_} = Row#spvcVccPerm.spvcVccEntry,
+ If_Value;
+ Row when record(Row,spvcTargetVc) ->
+ {If_Value,_,_} = Row#spvcTargetVc.entry,
+ If_Value;
+ Row when record(Row,spvcTargetVp) ->
+ {If_Value,_} = Row#spvcTargetVp.entry,
+ If_Value;
+ Row when record(Row,pchVc) ->
+ {If_Value,_,_} = Row#pchVc.vclEntry,
+ If_Value;
+ Row when record(Row,pchVp) ->
+ {If_Value,_} = Row#pchVp.vplEntry,
+ If_Value;
+ Row when record(Row,spvcFr) ->
+ {If_Value,_} = Row#spvcFr.spvcFrEntry,
+ If_Value;
+ Row when record(Row,spvcFrPerm) ->
+ {If_Value,_} = Row#spvcFrPerm.spvcFrEntry,
+ If_Value;
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value;
+ {If_Value,_} ->
+ If_Value;
+ [If_Value|_] ->
+ If_Value;
+ _ ->
+ error
+ end) of
+ disabled ->
+ orig_state_machine(Spvc#spvcObj.currentState,activate_disabled,Spvc,Cols);
+ enabled ->
+ orig_state_machine(Spvc#spvcObj.currentState,activate_enabled,Spvc,Cols)
+ end
+ end;
+ {value,{_,6}} ->
+ debug_disabled,
+ case spvcDataBase:db_read({spvcObj,list_to_tuple(Row)}) of
+ [] ->
+ ok;
+ Spvc ->
+ mnesia:dirty_update_counter(spvcHcEtStat,spvcLib:get_board(hd(Row)),- 1),
+ orig_state_machine(Spvc#spvcObj.currentState,destroy,Spvc,Cols)
+ end;
+ {value,{_,2}} ->
+ debug_disabled,
+ case spvcDataBase:db_read({spvcObj,list_to_tuple(Row)}) of
+ [] ->
+ mnesia:dirty_update_counter(spvcHcEtStat,spvcLib:get_board(hd(Row)),1),
+ ok;
+ Spvc ->
+ orig_state_machine(Spvc#spvcObj.currentState,not_in_service,Spvc,Cols)
+ end;
+ false ->
+ debug_disabled,
+ Spvc = spvcDataBase:db_read({spvcObj,list_to_tuple(Row)}),
+ CurrentState = Spvc#spvcObj.currentState,
+ NewSpvc = set_attrs(Spvc,Cols),
+ Restart = case {case Row of
+ {_,_,_,_} ->
+ spvcVcc;
+ {_,_,_} ->
+ spvcVpc;
+ {_,_} ->
+ spvcFr;
+ [_,_,_,_] ->
+ spvcVcc;
+ [_,_,_] ->
+ spvcVpc;
+ [_,_] ->
+ spvcFr
+ end,restart} of
+ {spvcVcc,targetAddress} ->
+ 2;
+ {spvcVcc,selectType} ->
+ 3;
+ {spvcVcc,targetVpi} ->
+ 18;
+ {spvcVcc,targetVci} ->
+ 5;
+ {spvcVcc,releaseCause} ->
+ 6;
+ {spvcVcc,releaseDiagnostic} ->
+ 7;
+ {spvcVcc,retryInterval} ->
+ 10;
+ {spvcVcc,retryTimer} ->
+ 11;
+ {spvcVcc,retryThreshold} ->
+ 12;
+ {spvcVcc,retryFailures} ->
+ 13;
+ {spvcVcc,retryLimit} ->
+ 14;
+ {spvcVcc,rowStatus} ->
+ 15;
+ {spvcVcc,restart} ->
+ 9;
+ {spvcVcc,targetSelectType_any} ->
+ 2;
+ {spvcVcc,targetSelectType_required} ->
+ 1;
+ {spvcVpc,targetAddress} ->
+ 2;
+ {spvcVpc,selectType} ->
+ 3;
+ {spvcVpc,targetVpi} ->
+ 15;
+ {spvcVpc,releaseCause} ->
+ 5;
+ {spvcVpc,releaseDiagnostic} ->
+ 6;
+ {spvcVpc,retryInterval} ->
+ 9;
+ {spvcVpc,retryTimer} ->
+ 10;
+ {spvcVpc,retryThreshold} ->
+ 11;
+ {spvcVpc,retryFailures} ->
+ 12;
+ {spvcVpc,retryLimit} ->
+ 13;
+ {spvcVpc,rowStatus} ->
+ 14;
+ {spvcVpc,restart} ->
+ 8;
+ {spvcVpc,targetSelectType_any} ->
+ 2;
+ {spvcVpc,targetSelectType_required} ->
+ 1;
+ {spvcFr,targetAddress} ->
+ 3;
+ {spvcFr,selectType} ->
+ 5;
+ {spvcFr,identifier} ->
+ 6;
+ {spvcFr,targetVpi} ->
+ 7;
+ {spvcFr,targetVci} ->
+ 8;
+ {spvcFr,translation} ->
+ 9;
+ {spvcFr,releaseCause} ->
+ 10;
+ {spvcFr,releaseDiagnostic} ->
+ 11;
+ {spvcFr,operStatus} ->
+ 12;
+ {spvcFr,adminStatus} ->
+ 13;
+ {spvcFr,restart} ->
+ 14;
+ {spvcFr,retryInterval} ->
+ 15;
+ {spvcFr,retryTimer} ->
+ 16;
+ {spvcFr,retryThreshold} ->
+ 17;
+ {spvcFr,retryFailures} ->
+ 18;
+ {spvcFr,retryLimit} ->
+ 19;
+ {spvcFr,lastChange} ->
+ 20;
+ {spvcFr,rowStatus} ->
+ 21
+ end,
+ case lists:keysearch(Restart,1,Cols) of
+ {value,{Restart,1}} ->
+ orig_state_machine(CurrentState,restart,NewSpvc,Cols);
+ _ ->
+ spvcDataBase:db_write(NewSpvc),
+ ok
+ end
+ end,
+ {noError,0}.
+
+restart_spvc(Key) ->
+ debug_disabled,
+ Spvc = spvcDataBase:db_read({spvcObj,Key}),
+ handle_restart_spvc(Spvc#spvcObj.currentState,Spvc),
+ ok.
+
+handle_restart_spvc(rest_in_peace,Spvc) ->
+ debug_disabled,
+ rest_in_peace(restart,Spvc,undefined);
+handle_restart_spvc(_,_) ->
+ ok.
+
+restart_multi_spvcs(Key) ->
+ debug_disabled,
+ Spvc = spvcDataBase:db_read({spvcObj,Key}),
+ handle_restart_multi_spvcs(Spvc#spvcObj.currentState,Spvc),
+ ok.
+
+handle_restart_multi_spvcs(rest_in_peace,Spvc) ->
+ debug_disabled,
+ handle_restart_spvc(rest_in_peace,Spvc);
+handle_restart_multi_spvcs(active,Spvc) ->
+ debug_disabled,
+ active(restart,Spvc,undefined);
+handle_restart_multi_spvcs(outgoing_callproceeding,Spvc) ->
+ debug_disabled,
+ outgoing_callproceeding(restart,Spvc,undefined);
+handle_restart_multi_spvcs(release_at_restart,Spvc) ->
+ debug_disabled,
+ release_at_restart(restart,Spvc,undefined);
+handle_restart_multi_spvcs(wait,Spvc) ->
+ debug_disabled,
+ wait(restart,Spvc,undefined);
+handle_restart_multi_spvcs(rest_in_peace,Spvc) ->
+ debug_disabled,
+ rest_in_peace(restart,Spvc,undefined);
+handle_restart_multi_spvcs(_,_) ->
+ ok.
+
+orig_state_machine(null,createAndGo_enabled,Spvc,Attrs) ->
+ null(createAndGo_enabled,Spvc,Attrs);
+orig_state_machine(null,createAndGo_disabled,Spvc,Attrs) ->
+ null(createAndGo_disabled,Spvc,Attrs);
+orig_state_machine(null,createAndWait,Spvc,Attrs) ->
+ null(createAndWait,Spvc,Attrs);
+orig_state_machine(created,activate_disabled,Spvc,Attrs) ->
+ created(activate_disabled,Spvc,Attrs);
+orig_state_machine(created,activate_enabled,Spvc,Attrs) ->
+ created(activate_enabled,Spvc,Attrs);
+orig_state_machine(created,destroy,Spvc,Attrs) ->
+ created(destroy,Spvc,Attrs);
+orig_state_machine(outgoing_callproceeding,connect_nu,Spvc,Attrs) ->
+ outgoing_callproceeding(connect_nu,Spvc,Attrs);
+orig_state_machine(outgoing_callproceeding,destroy,Spvc,Attrs) ->
+ outgoing_callproceeding(destroy,Spvc,Attrs);
+orig_state_machine(outgoing_callproceeding,restart,Spvc,Attrs) ->
+ outgoing_callproceeding(restart,Spvc,Attrs);
+orig_state_machine(outgoing_callproceeding,release_nu,Spvc,Attrs) ->
+ case get_link_state_intf(case Spvc of
+ Spvc when record(Spvc,spvcObj) ->
+ case Spvc#spvcObj.spvcEntry of
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value
+ end;
+ Spvc when record(Spvc,spvcVcc) ->
+ {If_Value,_,_,_} = Spvc#spvcVcc.spvcVccEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVpc) ->
+ {If_Value,_,_} = Spvc#spvcVpc.spvcVpcEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVpcPerm) ->
+ {If_Value,_,_} = Spvc#spvcVpcPerm.spvcVpcEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVccPerm) ->
+ {If_Value,_,_,_} = Spvc#spvcVccPerm.spvcVccEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcTargetVc) ->
+ {If_Value,_,_} = Spvc#spvcTargetVc.entry,
+ If_Value;
+ Spvc when record(Spvc,spvcTargetVp) ->
+ {If_Value,_} = Spvc#spvcTargetVp.entry,
+ If_Value;
+ Spvc when record(Spvc,pchVc) ->
+ {If_Value,_,_} = Spvc#pchVc.vclEntry,
+ If_Value;
+ Spvc when record(Spvc,pchVp) ->
+ {If_Value,_} = Spvc#pchVp.vplEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcFr) ->
+ {If_Value,_} = Spvc#spvcFr.spvcFrEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcFrPerm) ->
+ {If_Value,_} = Spvc#spvcFrPerm.spvcFrEntry,
+ If_Value;
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value;
+ {If_Value,_} ->
+ If_Value;
+ [If_Value|_] ->
+ If_Value;
+ _ ->
+ error
+ end,release_nu) of
+ disabled ->
+ outgoing_callproceeding(release_nu_disabled,Spvc,Attrs);
+ enabled ->
+ outgoing_callproceeding(release_nu_enabled,Spvc,Attrs)
+ end;
+orig_state_machine(outgoing_callproceeding,release_comp_nu,Spvc,Attrs) ->
+ case get_link_state_intf(tuple_to_list(Spvc#spvcObj.spvcEntry),release_comp_nu) of
+ disabled ->
+ outgoing_callproceeding(release_comp_nu_disabled,Spvc,Attrs);
+ enabled ->
+ outgoing_callproceeding(release_comp_nu_enabled,Spvc,Attrs)
+ end;
+orig_state_machine(outgoing_callproceeding,not_in_service,Spvc,Attrs) ->
+ outgoing_callproceeding(not_in_service,Spvc,Attrs);
+orig_state_machine(outgoing_callproceeding,activate_enabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(outgoing_callproceeding,activate_disabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(active,destroy,Spvc,Attrs) ->
+ active(destroy,Spvc,Attrs);
+orig_state_machine(active,restart,Spvc,Attrs) ->
+ active(restart,Spvc,Attrs);
+orig_state_machine(active,release_nu,Spvc,Attrs) ->
+ case cnhChi:get_link_opstate(case Spvc of
+ Spvc when record(Spvc,spvcObj) ->
+ case Spvc#spvcObj.spvcEntry of
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value
+ end;
+ Spvc when record(Spvc,spvcVcc) ->
+ {If_Value,_,_,_} = Spvc#spvcVcc.spvcVccEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVpc) ->
+ {If_Value,_,_} = Spvc#spvcVpc.spvcVpcEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVpcPerm) ->
+ {If_Value,_,_} = Spvc#spvcVpcPerm.spvcVpcEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVccPerm) ->
+ {If_Value,_,_,_} = Spvc#spvcVccPerm.spvcVccEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcTargetVc) ->
+ {If_Value,_,_} = Spvc#spvcTargetVc.entry,
+ If_Value;
+ Spvc when record(Spvc,spvcTargetVp) ->
+ {If_Value,_} = Spvc#spvcTargetVp.entry,
+ If_Value;
+ Spvc when record(Spvc,pchVc) ->
+ {If_Value,_,_} = Spvc#pchVc.vclEntry,
+ If_Value;
+ Spvc when record(Spvc,pchVp) ->
+ {If_Value,_} = Spvc#pchVp.vplEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcFr) ->
+ {If_Value,_} = Spvc#spvcFr.spvcFrEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcFrPerm) ->
+ {If_Value,_} = Spvc#spvcFrPerm.spvcFrEntry,
+ If_Value;
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value;
+ {If_Value,_} ->
+ If_Value;
+ [If_Value|_] ->
+ If_Value;
+ _ ->
+ error
+ end) of
+ disabled ->
+ active(release_nu_disabled,Spvc,Attrs);
+ enabled ->
+ active(release_nu_enabled,Spvc,Attrs)
+ end;
+orig_state_machine(active,release_comp_nu,Spvc,Attrs) ->
+ release_at_restart(release_comp_nu,Spvc,Attrs);
+orig_state_machine(active,not_in_service,Spvc,Attrs) ->
+ active(not_in_service,Spvc,Attrs);
+orig_state_machine(active,activate_enabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(active,activate_disabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(active,release_incumbent,Spvc,Attrs) ->
+ active(release_incumbent,Spvc,Attrs);
+orig_state_machine(wait,destroy,Spvc,Attrs) ->
+ wait(destroy,Spvc,Attrs);
+orig_state_machine(wait,timeout,Spvc,Attrs) ->
+ wait(timeout,Spvc,Attrs);
+orig_state_machine(wait,restart,Spvc,Attrs) ->
+ wait(restart,Spvc,Attrs);
+orig_state_machine(wait,release_nu,Spvc,Attrs) ->
+ ok;
+orig_state_machine(wait,not_in_service,Spvc,Attrs) ->
+ wait(not_in_service,Spvc,Attrs);
+orig_state_machine(wait,activate_enabled,Spvc,Attrs) ->
+ wait(timeout,Spvc,Attrs);
+orig_state_machine(wait,activate_disabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(release_at_restart,release_comp_nu,Spvc,Attrs) ->
+ release_at_restart(release_comp_nu,Spvc,Attrs);
+orig_state_machine(release_at_restart,release_nu,Spvc,Attrs) ->
+ release_at_restart(release_nu,Spvc,Attrs);
+orig_state_machine(release_at_restart,connect_nu,Spvc,Attrs) ->
+ release_at_restart(connect_nu,Spvc,Attrs);
+orig_state_machine(release_at_restart,destroy,Spvc,Attrs) ->
+ release_at_restart(destroy,Spvc,Attrs);
+orig_state_machine(release_at_restart,not_in_service,Spvc,Attrs) ->
+ release_at_restart(not_in_service,Spvc,Attrs);
+orig_state_machine(release_at_restart,activate_enabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(release_at_restart,activate_disabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(release_request,release_comp_nu,Spvc,Attrs) ->
+ release_request(release_comp_nu,Spvc,Attrs);
+orig_state_machine(release_request,release_nu,Spvc,Attrs) ->
+ release_request(release_nu,Spvc,Attrs);
+orig_state_machine(release_request,destroy,Spvc,Attrs) ->
+ release_request(destroy,Spvc,Attrs);
+orig_state_machine(release_request,not_in_service,Spvc,Attrs) ->
+ release_request(not_in_service,Spvc,Attrs);
+orig_state_machine(release_request,activate_enabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(release_request,activate_disabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(rest_in_peace,restart,Spvc,Attrs) ->
+ rest_in_peace(restart,Spvc,Attrs);
+orig_state_machine(rest_in_peace,destroy,Spvc,Attrs) ->
+ rest_in_peace(destroy,Spvc,Attrs);
+orig_state_machine(rest_in_peace,not_in_service,Spvc,Attrs) ->
+ rest_in_peace(not_in_service,Spvc,Attrs);
+orig_state_machine(rest_in_peace,connect_nu,Spvc,Attrs) ->
+ rest_in_peace(connect_nu,Spvc,Attrs);
+orig_state_machine(rest_in_peace,activate_enabled,Spvc,Attrs) ->
+ rest_in_peace(restart,Spvc,Attrs);
+orig_state_machine(rest_in_peace,activate_disabled,Spvc,Attrs) ->
+ ok;
+orig_state_machine(rest_in_peace,release_nu,Spvc,Attrs) ->
+ ok;
+orig_state_machine(rest_in_peace,release_comp_nu,Spvc,Attrs) ->
+ ok;
+orig_state_machine(not_in_service,activate_enabled,Spvc,Attrs) ->
+ not_in_service(activate_enabled,Spvc,Attrs);
+orig_state_machine(not_in_service,activate_disabled,Spvc,Attrs) ->
+ not_in_service(activate_disabled,Spvc,Attrs);
+orig_state_machine(not_in_service,destroy,Spvc,Attrs) ->
+ not_in_service(destroy,Spvc,Attrs);
+orig_state_machine(not_in_service,connect_nu,Spvc,Attrs) ->
+ not_in_service(connect_nu,Spvc,Attrs);
+orig_state_machine(not_in_service,_,Spvc,Attrs) ->
+ ok;
+orig_state_machine(awaiting_switch_over,switch_over,Spvc,[HcId]) ->
+ awaiting_switch_over(switch_over,Spvc,[HcId]);
+orig_state_machine(awaiting_switch_over,activate_disabled,Spvc,Attrs) ->
+ awaiting_switch_over(activate_disabled,Spvc,Attrs);
+orig_state_machine(awaiting_switch_over,destroy,Spvc,Attrs) ->
+ awaiting_switch_over(destroy,Spvc,Attrs);
+orig_state_machine(awaiting_switch_over,restart,Spvc,Attrs) ->
+ awaiting_switch_over(restart,Spvc,Attrs);
+orig_state_machine(awaiting_switch_over,_,Spvc,Attrs) ->
+ ok;
+orig_state_machine(undefined,destroy,Spvc,Attrs) ->
+ rest_in_peace(destroy,Spvc,Attrs).
+
+null(createAndGo_enabled,[],[Row,Cols,Type,FrKey]) ->
+ debug_disabled,
+ Key = list_to_tuple(Row),
+ Spvc = #spvcObj{spvcEntry = Key,
+ spvcApplication = Type,
+ spvcRowStatus = 1,
+ spvcFrKey = FrKey},
+ Spvc1 = set_attrs(Spvc,Cols),
+ {Spvc2,HcId,Setup} = new_state_outgoing_call_proceeding(Spvc1),
+ pchTpUpdate(case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end),
+ spvcDataBase:db_write(Spvc2),
+ setup(HcId,Setup,Spvc2);
+null(createAndGo_disabled,[],[Row,Cols,Type,FrKey]) ->
+ debug_disabled,
+ case get_link_state_intf(Row,null_createAndGo_disabled) of
+ disabled ->
+ Key = list_to_tuple(Row),
+ Spvc = #spvcObj{spvcEntry = Key,
+ spvcRowStatus = 1,
+ currentState = rest_in_peace,
+ spvcApplication = Type,
+ spvcFrKey = FrKey},
+ Spvc1 = set_attrs(Spvc,Cols),
+ pchTpUpdate(case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end),
+ set_call_failure_data_and_send_spvcFailingAlarm(Key),
+ spvcDataBase:db_write(Spvc1);
+ enabled ->
+ null(createAndGo_enabled,[],[Row,Cols,Type,FrKey])
+ end;
+null(createAndWait,[],[Row,Cols,Type,FrKey]) ->
+ debug_disabled,
+ Key = list_to_tuple(Row),
+ Spvc = #spvcObj{spvcEntry = Key,
+ spvcApplication = Type,
+ spvcFrKey = FrKey},
+ Spvc1 = new_state_created(Spvc,Cols),
+ pchTpUpdate(case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end),
+ spvcDataBase:db_write(Spvc1).
+
+pchTpUpdate({If,Vpi,Vci}) ->
+ spvcDataBase:db_write(#spvcVcDyn{vclEntry = {If,Vpi,Vci},
+ vclCcIdentifier = 0});
+pchTpUpdate({If,Vpi}) ->
+ spvcDataBase:db_write(#spvcVpDyn{vplEntry = {If,Vpi},
+ vplCcIdentifier = 0}).
+
+created(activate_enabled,Spvc,Attrs) ->
+ debug_disabled,
+ Spvc1 = set_attrs(Spvc,Attrs),
+ Spvc2 = Spvc1#spvcObj{spvcRowStatus = 1},
+ {Spvc3,HcId,HciMsg} = new_state_outgoing_call_proceeding(Spvc1),
+ spvcDataBase:db_write(Spvc3),
+ setup(HcId,HciMsg,Spvc3);
+created(activate_disabled,Spvc,Attrs) ->
+ debug_disabled,
+ Spvc1 = set_attrs(Spvc,Attrs),
+ Spvc2 = Spvc1#spvcObj{currentState = rest_in_peace,
+ spvcRowStatus = 1},
+ update_state(Spvc,4),
+ spvcDataBase:db_write(Spvc2);
+created(destroy,Spvc,Attrs) ->
+ debug_disabled,
+ clear(Spvc).
+
+outgoing_callproceeding(connect_nu,Spvc,[HcId,Connect]) ->
+ debug_disabled,
+ Spvc1 = new_state_active(Spvc),
+ case Spvc#spvcObj.spvcTargetSelectType of
+ 2 ->
+ Cpn = Connect#hci_connect.hci_cpn_soft,
+ TargetVpi = Cpn#hci_cpn_soft.hci_soft_vpi,
+ TargetVci = Cpn#hci_cpn_soft.hci_soft_vci,
+ TargetDlci = Cpn#hci_cpn_soft.hci_soft_dlci,
+ Spvc2 = Spvc1#spvcObj{spvcTargetSelectType = 1,
+ spvcTargetVpi = TargetVpi,
+ spvcTargetVci = TargetVci,
+ spvcTargetDlci = TargetDlci},
+ spvcDataBase:db_write(Spvc2);
+ 1 ->
+ spvcDataBase:db_write(ets,Spvc1);
+ 2 ->
+ Cpn = Connect#hci_connect.hci_cpn_soft,
+ TargetVpi = Cpn#hci_cpn_soft.hci_soft_vpi,
+ TargetDlci = Cpn#hci_cpn_soft.hci_soft_dlci,
+ Spvc2 = Spvc1#spvcObj{spvcTargetSelectType = 1,
+ spvcTargetVpi = TargetVpi,
+ spvcTargetDlci = TargetDlci},
+ spvcDataBase:db_write(Spvc2);
+ 1 ->
+ spvcDataBase:db_write(ets,Spvc1)
+ end,
+ Key = Spvc#spvcObj.spvcEntry,
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ SpvcDyn = case PchKey of
+ {_,_,_} ->
+ case spvcDataBase:db_read({spvcVcDyn,PchKey}) of
+ [] ->
+ #spvcVcDyn{vclEntry = PchKey,
+ vclCcIdentifier = 0,
+ vclConnId = HcId};
+ SpvcVcDyn ->
+ SpvcVcDyn#spvcVcDyn{vclEntry = PchKey,
+ vclConnId = HcId}
+ end;
+ {_,_} ->
+ case spvcDataBase:db_read({spvcVpDyn,PchKey}) of
+ [] ->
+ #spvcVpDyn{vplEntry = PchKey,
+ vplCcIdentifier = 0,
+ vplConnId = HcId};
+ SpvcVpDyn ->
+ SpvcVpDyn#spvcVpDyn{vplEntry = PchKey,
+ vplConnId = HcId}
+ end
+ end,
+ spvcDataBase:db_write(SpvcDyn),
+ CbCValue = get(no_of_rerouting),
+ CbC = case CbCValue of
+ undefined ->
+ debug_disabled,
+ 0;
+ _ ->
+ CbCValue
+ end,
+ SpvcDyn2 = case Key of
+ {_,_,_,_} ->
+ case spvcDataBase:db_read({spvcVccDyn,Key}) of
+ [] ->
+ #spvcVccDyn{spvcVccEntry = Key,
+ crankBackCounter = CbC};
+ SpvcVccDyn ->
+ SpvcVccDyn#spvcVccDyn{spvcVccEntry = Key,
+ crankBackCounter = CbC}
+ end;
+ {_,_,_} ->
+ case spvcDataBase:db_read({spvcVpcDyn,Key}) of
+ [] ->
+ #spvcVpcDyn{spvcVpcEntry = Key,
+ crankBackCounter = CbC};
+ SpvcVpcDyn ->
+ SpvcVpcDyn#spvcVpcDyn{spvcVpcEntry = Key,
+ crankBackCounter = CbC}
+ end
+ end,
+ spvcDataBase:db_write(SpvcDyn2),
+ NewPch = spvcDataBase:db_read({pch,PchKey}),
+ spvcLib:clear_spvcStillTryingAlarm(Key),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ spvcLib:ilmi_change(PchKey,1),
+ ok;
+ FrEndPoint ->
+ SpvcFrObj = spvcDataBase:db_read({spvcFrPerm,FrEndPoint}),
+ NewSpvcFrObj = SpvcFrObj#spvcFrPerm{spvcFrConnect = 3},
+ spvcDataBase:db_write(NewSpvcFrObj),
+ spvcLib:ilmi_change(PchKey,1),
+ set_fr_atm_iw_admin_state(FrEndPoint,up,Spvc)
+ end;
+outgoing_callproceeding(restart,Spvc,_) ->
+ Key = Spvc#spvcObj.spvcEntry,
+ debug_disabled,
+ Spvc1 = new_state_release_at_restart(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ spvcLib:clear_spvcStillTryingAlarm(Key);
+outgoing_callproceeding(release_nu_enabled,Spvc,[HcId,HciMsg]) ->
+ debug_disabled,
+ Spvc1 = new_state_rest_in_peace_or_wait(Spvc,[HcId,HciMsg]),
+ [CcCause|_] = HciMsg#hci_release.hci_cause_list,
+ Spvc2 = Spvc1#spvcObj{spvcLastReleaseCause = CcCause#hci_cause.hci_cause_value,
+ spvcLastReleaseDiagnostic = CcCause#hci_cause.hci_diagnostics_list},
+ spvcDataBase:db_write(ets,Spvc2);
+outgoing_callproceeding(release_nu_disabled,Spvc,[HcId,Release]) ->
+ debug_disabled,
+ Spvc1 = new_state_rest_in_peace(Spvc),
+ [CcCause|_] = Release#hci_release.hci_cause_list,
+ Spvc2 = Spvc1#spvcObj{spvcLastReleaseCause = CcCause#hci_cause.hci_cause_value,
+ spvcLastReleaseDiagnostic = CcCause#hci_cause.hci_diagnostics_list},
+ spvcDataBase:db_write(ets,Spvc2),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry);
+outgoing_callproceeding(release_comp_nu_enabled,Spvc,[HcId,Release_complete]) ->
+ debug_disabled,
+ Spvc1 = new_state_rest_in_peace_or_wait(Spvc,[HcId,Release_complete]),
+ spvcDataBase:db_write(ets,Spvc1);
+outgoing_callproceeding(release_comp_nu_disabled,Spvc,[HcId,Release_complete]) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_rest_in_peace(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ spvcLib:clear_spvcStillTryingAlarm(Key);
+outgoing_callproceeding(destroy,Spvc,_) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_release_request(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc1),
+ spvcLib:clear_spvcStillTryingAlarm(Key);
+outgoing_callproceeding(not_in_service,Spvc,_) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_not_in_service(Spvc),
+ spvcDataBase:db_write(Spvc1),
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc1),
+ spvcLib:clear_spvcStillTryingAlarm(Key).
+
+active(restart,Spvc,_) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_release_at_restart(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcLib:ilmi_change(PchKey,2),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ ok;
+ FrEndPoint ->
+ set_fr_atm_iw_admin_state(FrEndPoint,down,Spvc)
+ end;
+active(release_nu_enabled,Spvc,[HcId,Release]) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_rest_in_peace_or_wait(Spvc,[HcId,Release]),
+ [CcCause|_] = Release#hci_release.hci_cause_list,
+ Spvc2 = Spvc1#spvcObj{spvcLastReleaseCause = CcCause#hci_cause.hci_cause_value,
+ spvcLastReleaseDiagnostic = CcCause#hci_cause.hci_diagnostics_list},
+ spvcDataBase:db_write(ets,Spvc2),
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcLib:ilmi_change(PchKey,2),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ ok;
+ FrEndPoint ->
+ set_fr_atm_iw_admin_state(FrEndPoint,down,Spvc)
+ end;
+active(release_nu_disabled,Spvc,[HcId,Release]) ->
+ debug_disabled,
+ case get_link_state_intf(case Spvc of
+ Spvc when record(Spvc,spvcObj) ->
+ case Spvc#spvcObj.spvcEntry of
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value
+ end;
+ Spvc when record(Spvc,spvcVcc) ->
+ {If_Value,_,_,_} = Spvc#spvcVcc.spvcVccEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVpc) ->
+ {If_Value,_,_} = Spvc#spvcVpc.spvcVpcEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVpcPerm) ->
+ {If_Value,_,_} = Spvc#spvcVpcPerm.spvcVpcEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcVccPerm) ->
+ {If_Value,_,_,_} = Spvc#spvcVccPerm.spvcVccEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcTargetVc) ->
+ {If_Value,_,_} = Spvc#spvcTargetVc.entry,
+ If_Value;
+ Spvc when record(Spvc,spvcTargetVp) ->
+ {If_Value,_} = Spvc#spvcTargetVp.entry,
+ If_Value;
+ Spvc when record(Spvc,pchVc) ->
+ {If_Value,_,_} = Spvc#pchVc.vclEntry,
+ If_Value;
+ Spvc when record(Spvc,pchVp) ->
+ {If_Value,_} = Spvc#pchVp.vplEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcFr) ->
+ {If_Value,_} = Spvc#spvcFr.spvcFrEntry,
+ If_Value;
+ Spvc when record(Spvc,spvcFrPerm) ->
+ {If_Value,_} = Spvc#spvcFrPerm.spvcFrEntry,
+ If_Value;
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value;
+ {If_Value,_} ->
+ If_Value;
+ [If_Value|_] ->
+ If_Value;
+ _ ->
+ error
+ end,active_release_nu_disabled) of
+ disabled ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = Spvc#spvcObj{currentState = rest_in_peace},
+ [CcCause|_] = Release#hci_release.hci_cause_list,
+ Spvc2 = Spvc1#spvcObj{spvcLastReleaseCause = CcCause#hci_cause.hci_cause_value,
+ spvcLastReleaseDiagnostic = CcCause#hci_cause.hci_diagnostics_list},
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcLib:ilmi_change(PchKey,2),
+ update_state(Spvc,4),
+ spvcDataBase:db_write(ets,Spvc2),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ ok;
+ FrEndPoint ->
+ set_fr_atm_iw_admin_state(FrEndPoint,down,Spvc)
+ end;
+ enabled ->
+ active(release_nu_enabled,Spvc,[HcId,Release])
+ end;
+active(destroy,Spvc,_) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_release_request(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcLib:ilmi_change(PchKey,2),
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc);
+active(not_in_service,Spvc,_) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_not_in_service(Spvc),
+ spvcDataBase:db_write(Spvc1),
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcLib:ilmi_change(PchKey,2),
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc1),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ ok;
+ FrEndPoint ->
+ set_fr_atm_iw_admin_state(FrEndPoint,down,Spvc)
+ end;
+active(release_incumbent,Spvc,[Release]) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_awaiting_switch_over(Spvc),
+ spvcDataBase:db_write(Spvc1),
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc1).
+
+read_spvcTpToHcId({If,Vpi,Vci,Leaf}) ->
+ spvcDataBase:db_read({spvcTpToHcId,{orig,If,Vpi,Vci,Leaf}});
+read_spvcTpToHcId({If,Vpi,Leaf}) ->
+ spvcDataBase:db_read({spvcTpToHcId,{orig,If,Vpi,Leaf}}).
+
+release_request(release_nu,Spvc,[HcId,Release]) ->
+ debug_disabled,
+ clear(Spvc);
+release_request(release_comp_nu,Spvc,[HcId,Release_comp]) ->
+ debug_disabled,
+ clear(Spvc);
+release_request(destroy,Spvc,_) ->
+ debug_disabled,
+ case Spvc#spvcObj.spvcEntry of
+ {If,Vpi,Vci,Leaf} ->
+ case spvcDataBase:db_read({spvcTpToHcId,{orig,If,Vpi,Vci,Leaf}}) of
+ SpvcTpToHcId ->
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc),
+ clear(Spvc);
+ _ ->
+ ok
+ end;
+ {If,Vpi,Leaf} ->
+ case spvcDataBase:db_read({spvcTpToHcId,{orig,If,Vpi,Leaf}}) of
+ SpvcTpToHcId ->
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc),
+ clear(Spvc);
+ _ ->
+ ok
+ end
+ end,
+ ok;
+release_request(not_in_service,Spvc,_) ->
+ debug_disabled,
+ ok.
+
+release_at_restart(release_nu,Spvc,[HcId,Release]) ->
+ debug_disabled,
+ {Spvc1,NewHcId,Setup} = new_state_outgoing_call_proceeding(Spvc),
+ [CcCause|_] = Release#hci_release.hci_cause_list,
+ Spvc2 = Spvc1#spvcObj{spvcLastReleaseCause = CcCause#hci_cause.hci_cause_value,
+ spvcLastReleaseDiagnostic = CcCause#hci_cause.hci_diagnostics_list},
+ spvcDataBase:db_write(ets,Spvc2),
+ timer:sleep(500),
+ setup(NewHcId,Setup,Spvc2);
+release_at_restart(release_comp_nu,Spvc,[HcId,Release_complete]) ->
+ debug_disabled,
+ {Spvc1,NewHcId,Setup} = new_state_outgoing_call_proceeding(Spvc),
+ Spvc2 = Spvc1#spvcObj{spvcLastReleaseCause = 31,
+ spvcLastReleaseDiagnostic = []},
+ spvcDataBase:db_write(ets,Spvc2),
+ timer:sleep(500),
+ setup(NewHcId,Setup,Spvc1);
+release_at_restart(connect_nu,Spvc,_) ->
+ debug_disabled,
+ ok;
+release_at_restart(destroy,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_release_request(Spvc),
+ spvcDataBase:db_write(ets,Spvc1);
+release_at_restart(restart,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_release_at_restart(Spvc);
+release_at_restart(not_in_service,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_not_in_service(Spvc),
+ spvcDataBase:db_write(Spvc1).
+
+wait(timeout,Spvc,_) ->
+ debug_disabled,
+ {Spvc1,HcId,Setup} = new_state_outgoing_call_proceeding(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ setup(HcId,Setup,Spvc1);
+wait(destroy,Spvc,_) ->
+ debug_disabled,
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry),
+ clear(Spvc);
+wait(restart,Spvc,_) ->
+ debug_disabled,
+ {Spvc1,HcId,Setup} = new_state_outgoing_call_proceeding(Spvc#spvcObj{spvcRetryFailures = 0}),
+ spvcDataBase:db_write(ets,Spvc1),
+ spvcReestablishTimer:cancel(Spvc#spvcObj.spvcEntry),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry),
+ setup(HcId,Setup,Spvc1);
+wait(not_in_service,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_not_in_service(Spvc),
+ spvcDataBase:db_write(Spvc1),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry).
+
+rest_in_peace(restart,Spvc,_) ->
+ debug_disabled,
+ {Spvc1,HcId,Setup} = new_state_outgoing_call_proceeding(Spvc#spvcObj{spvcRetryFailures = 0}),
+ spvcDataBase:db_write(ets,Spvc1),
+ setup(HcId,Setup,Spvc1),
+ sccmManager:cast_to_sccm(spvcLib:get_cp(om_node),spvcLib,clear_spvcFailingAlarm,[spvcLib:get_membership(node())]);
+rest_in_peace(destroy,Spvc,_) ->
+ debug_disabled,
+ sccmManager:cast_to_sccm(spvcLib:get_cp(om_node),spvcLib,clear_spvcFailingAlarm,[spvcLib:get_membership(node())]),
+ clear(Spvc);
+rest_in_peace(connect_nu,Spvc,_) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(b_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc);
+rest_in_peace(not_in_service,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_not_in_service(Spvc),
+ spvcDataBase:db_write(Spvc1),
+ sccmManager:cast_to_sccm(spvcLib:get_cp(om_node),spvcLib,clear_spvcFailingAlarm,[spvcLib:get_membership(node())]).
+
+not_in_service(activate_enabled,Spvc,_) ->
+ debug_disabled,
+ {Spvc1,HcId,Setup} = new_state_outgoing_call_proceeding(Spvc#spvcObj{spvcRetryFailures = 0}),
+ spvcDataBase:db_write(Spvc1#spvcObj{spvcRowStatus = 1}),
+ setup(HcId,Setup,Spvc1);
+not_in_service(activate_disabled,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_rest_in_peace(Spvc),
+ spvcDataBase:db_write(Spvc1#spvcObj{spvcRowStatus = 1});
+not_in_service(connect_nu,Spvc,_) ->
+ debug_disabled,
+ Spvc1 = new_state_rest_in_peace(Spvc),
+ spvcDataBase:db_write(Spvc1#spvcObj{spvcRowStatus = 1}),
+ Key = Spvc#spvcObj.spvcEntry,
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(b_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc1);
+not_in_service(destroy,Spvc,_) ->
+ debug_disabled,
+ clear(Spvc).
+
+awaiting_switch_over(switch_over,Spvc,[HcId]) ->
+ debug_disabled,
+ Spvc1 = Spvc#spvcObj{currentState = active},
+ Index = Spvc#spvcObj.spvcEntry,
+ TpIndex = create_tp_index(Index),
+ spvcDataBase:db_write(Spvc1),
+ ets:insert(spvcTpToHcId,#spvcTpToHcId{tpEntry = TpIndex,
+ hcId = HcId}),
+ ets:insert(spvcHcIdToTp,#spvcHcIdToTp{tpEntry = TpIndex,
+ hcId = HcId}),
+ update_dyn_table_hcid(Index,HcId),
+ ok;
+awaiting_switch_over(activate_disabled,Spvc,Attrs) ->
+ Spvc1 = new_state_rest_in_peace(Spvc),
+ spvcDataBase:db_write(Spvc1),
+ ok;
+awaiting_switch_over(restart,Spvc,Attrs) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Spvc1 = new_state_release_at_restart(Spvc),
+ spvcDataBase:db_write(ets,Spvc1),
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcLib:ilmi_change(PchKey,2),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ ok;
+ FrEndPoint ->
+ set_fr_atm_iw_admin_state(FrEndPoint,down,Spvc)
+ end;
+awaiting_switch_over(destroy,Spvc,Attrs) ->
+ clear(Spvc).
+
+create_tp_index({If,Vpi,Vci,Leaf}) ->
+ list_to_tuple([orig,If,Vpi,Vci,Leaf]);
+create_tp_index({If,Vpi,Leaf}) ->
+ list_to_tuple([orig,If,Vpi,Leaf]).
+
+update_dyn_table_hcid({If,Vpi,Vci,Leaf},HcId) ->
+ [VcDyn] = ets:lookup(spvcVcDyn,{If,Vpi,Vci}),
+ ets:insert(spvcVcDyn,VcDyn#spvcVcDyn{vclConnId = HcId});
+update_dyn_table_hcid({If,Vpi,Leaf},HcId) ->
+ [VpDyn] = ets:lookup(spvcVpDyn,{If,Vpi}),
+ ets:insert(spvcVpDyn,VpDyn#spvcVpDyn{vplConnId = HcId}).
+
+new_state_outgoing_call_proceeding(Spvc) ->
+ debug_disabled,
+ Spvc1 = Spvc#spvcObj{spvcRowStatus = 1,
+ currentState = outgoing_callproceeding},
+ Key = Spvc1#spvcObj.spvcEntry,
+ update_state(Spvc,outgoing_callproceeding),
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ {FwdAtdIndex,BwdAtdIndex} = case PchKey of
+ {_,_,_} ->
+ Vc = spvcDataBase:db_read({pchVc,PchKey}),
+ {Vc#pchVc.vclReceiveTrafficDescrIndex,Vc#pchVc.vclTransmitTrafficDescrIndex};
+ {_,_} ->
+ Vp = spvcDataBase:db_read({pchVp,PchKey}),
+ {Vp#pchVp.vplReceiveTrafficDescrIndex,Vp#pchVp.vplTransmitTrafficDescrIndex}
+ end,
+ FwdPchAtd = spvcDataBase:db_read({pchAtd,FwdAtdIndex}),
+ BwdPchAtd = spvcDataBase:db_read({pchAtd,BwdAtdIndex}),
+ Row = tuple_to_list(Key),
+ HcId = spvcLib:create_hcid(Row,case Row of
+ Row when record(Row,spvcObj) ->
+ case Row#spvcObj.spvcEntry of
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value
+ end;
+ Row when record(Row,spvcVcc) ->
+ {If_Value,_,_,_} = Row#spvcVcc.spvcVccEntry,
+ If_Value;
+ Row when record(Row,spvcVpc) ->
+ {If_Value,_,_} = Row#spvcVpc.spvcVpcEntry,
+ If_Value;
+ Row when record(Row,spvcVpcPerm) ->
+ {If_Value,_,_} = Row#spvcVpcPerm.spvcVpcEntry,
+ If_Value;
+ Row when record(Row,spvcVccPerm) ->
+ {If_Value,_,_,_} = Row#spvcVccPerm.spvcVccEntry,
+ If_Value;
+ Row when record(Row,spvcTargetVc) ->
+ {If_Value,_,_} = Row#spvcTargetVc.entry,
+ If_Value;
+ Row when record(Row,spvcTargetVp) ->
+ {If_Value,_} = Row#spvcTargetVp.entry,
+ If_Value;
+ Row when record(Row,pchVc) ->
+ {If_Value,_,_} = Row#pchVc.vclEntry,
+ If_Value;
+ Row when record(Row,pchVp) ->
+ {If_Value,_} = Row#pchVp.vplEntry,
+ If_Value;
+ Row when record(Row,spvcFr) ->
+ {If_Value,_} = Row#spvcFr.spvcFrEntry,
+ If_Value;
+ Row when record(Row,spvcFrPerm) ->
+ {If_Value,_} = Row#spvcFrPerm.spvcFrEntry,
+ If_Value;
+ {If_Value,_,_,_} ->
+ If_Value;
+ {If_Value,_,_} ->
+ If_Value;
+ {If_Value,_} ->
+ If_Value;
+ [If_Value|_] ->
+ If_Value;
+ _ ->
+ error
+ end),
+ Setup = spvcEncode:encode_cc_setup(Row,Spvc1,FwdPchAtd,BwdPchAtd),
+ debug_disabled,
+ debug_disabled,
+ debug_disabled,
+ {Spvc1,HcId,Setup}.
+
+new_state_release_request(Spvc) ->
+ debug_disabled,
+ update_state(Spvc,release_request),
+ Spvc#spvcObj{currentState = release_request}.
+
+new_state_release_at_restart(Spvc) ->
+ debug_disabled,
+ Spvc1 = Spvc#spvcObj{spvcRetryFailures = 0,
+ currentState = release_at_restart},
+ update_state(Spvc,release_at_restart),
+ HcId = spvcEncode:encode_cc_hcid(Spvc1#spvcObj.spvcEntry),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(a_side,HcId,Release,Spvc1),
+ Spvc1.
+
+new_state_rest_in_peace_or_wait(Spvc,[HcId,HciMsg]) ->
+ debug_disabled,
+ Spvc1 = Spvc#spvcObj{spvcRetryFailures = Spvc#spvcObj.spvcRetryFailures + 1},
+ case check_limits(Spvc1) of
+ {ok,ok,no_retries} ->
+ send_spvcFailingAlarm(Spvc#spvcObj.spvcEntry),
+ update_state(Spvc,4),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry),
+ Spvc1#spvcObj{currentState = rest_in_peace};
+ {ok,ok,_} ->
+ Spvc2 = Spvc1#spvcObj{spvcRetryTimer = time(),
+ currentState = wait},
+ update_state(Spvc,wait),
+ start_timer(wait,Spvc2),
+ Spvc2;
+ {retry_threshold,ok,no_retries} ->
+ Spvc2 = Spvc1#spvcObj{currentState = rest_in_peace},
+ update_state(Spvc,4),
+ send_call_failure(Spvc),
+ send_spvcFailingAlarm(Spvc#spvcObj.spvcEntry),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry),
+ Spvc2;
+ {retry_threshold,ok,_} ->
+ Spvc2 = Spvc1#spvcObj{spvcRetryTimer = time(),
+ currentState = wait},
+ update_state(Spvc,wait),
+ send_call_failure(Spvc2),
+ start_timer(wait,Spvc2),
+ Spvc2;
+ {ok,retry_limit,_} ->
+ send_spvcFailingAlarm(Spvc#spvcObj.spvcEntry),
+ update_state(Spvc,4),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry),
+ Spvc1#spvcObj{currentState = rest_in_peace};
+ {retry_threshold,retry_limit,_} ->
+ Spvc2 = Spvc1#spvcObj{currentState = rest_in_peace},
+ update_state(Spvc,4),
+ send_call_failure(Spvc2),
+ send_spvcFailingAlarm(Spvc#spvcObj.spvcEntry),
+ spvcLib:clear_spvcStillTryingAlarm(Spvc#spvcObj.spvcEntry),
+ Spvc2
+ end.
+
+send_call_failure(Spvc) ->
+ case Spvc#spvcObj.spvcRetryThreshold of
+ 0 ->
+ ok;
+ _ ->
+ sccmManager:cast_to_sccm(spvcLib:get_cp(om_node),spvcOrig,call_failure,[Spvc])
+ end.
+
+new_state_rest_in_peace(Spvc) ->
+ debug_disabled,
+ update_state(Spvc,4),
+ Spvc1 = Spvc#spvcObj{spvcRetryFailures = Spvc#spvcObj.spvcRetryFailures + 1},
+ send_spvcFailingAlarm(Spvc#spvcObj.spvcEntry),
+ case check_limits(Spvc1) of
+ {ok,_,_} ->
+ Spvc1#spvcObj{currentState = rest_in_peace};
+ {retry_threshold,_,_} ->
+ Spvc2 = Spvc1#spvcObj{currentState = rest_in_peace},
+ case Spvc2#spvcObj.spvcRetryThreshold of
+ 0 ->
+ ok;
+ _ ->
+ sccmManager:cast_to_sccm(spvcLib:get_cp(om_node),spvcOrig,call_failure,[Spvc2])
+ end,
+ Spvc2
+ end.
+
+new_state_active(Spvc) ->
+ debug_disabled,
+ update_state(Spvc,3),
+ Spvc#spvcObj{spvcRetryFailures = 0,
+ currentState = active}.
+
+new_state_created(Spvc,SetCols) ->
+ debug_disabled,
+ update_state(Spvc,created),
+ case spvcSNMP:is_all_values(case Spvc#spvcObj.spvcEntry of
+ {_,_,_,_} ->
+ spvcVcc;
+ {_,_,_} ->
+ spvcVpc;
+ {_,_} ->
+ spvcFr;
+ [_,_,_,_] ->
+ spvcVcc;
+ [_,_,_] ->
+ spvcVpc;
+ [_,_] ->
+ spvcFr
+ end,SetCols) of
+ true ->
+ Spvc1 = Spvc#spvcObj{spvcRowStatus = 2,
+ currentState = created},
+ set_attrs(Spvc1,SetCols);
+ false ->
+ Spvc1 = Spvc#spvcObj{spvcRowStatus = 3,
+ currentState = created},
+ set_attrs(Spvc1,SetCols)
+ end.
+
+new_state_not_in_service(Spvc) ->
+ debug_disabled,
+ update_state(Spvc,not_in_service),
+ Spvc#spvcObj{currentState = not_in_service,
+ spvcRowStatus = 2}.
+
+new_state_awaiting_switch_over(Spvc) ->
+ debug_disabled,
+ Spvc#spvcObj{currentState = awaiting_switch_over}.
+
+update_state(Spvc,NewState) ->
+ State = Spvc#spvcObj.currentState,
+ SpvcEntry = Spvc#spvcObj.spvcEntry,
+ debug_disabled,
+ spvcLib:update_state({State,SpvcEntry},NewState).
+
+send_spvcFailingAlarm(Key) ->
+ debug_disabled,
+ rpc:cast(spvcLib:get_cp(om_node),spvcLib,send_spvcFailingAlarm,[Key]).
+
+set_call_failure_data_and_send_spvcFailingAlarm({If,Vpi,Leaf}) ->
+ debug_disabled,
+ Spvc = spvcDataBase:db_read({spvcObj,{If,Vpi,Leaf}}),
+ if
+ Spvc == [] ->
+ ok;
+ true ->
+ spvcLib:update_state({Spvc#spvcObj.currentState,{If,Vpi,Leaf}},4)
+ end;
+set_call_failure_data_and_send_spvcFailingAlarm({If,Vpi,Vci,Leaf}) ->
+ debug_disabled,
+ Spvc = spvcDataBase:db_read({spvcObj,{If,Vpi,Vci,Leaf}}),
+ if
+ Spvc == [] ->
+ ok;
+ true ->
+ spvcLib:update_state({Spvc#spvcObj.currentState,{If,Vpi,Vci,Leaf}},4)
+ end.
+
+set_attrs(Spvc,SetCols) ->
+ case Spvc#spvcObj.spvcEntry of
+ {_,_,_,_} ->
+ set_attrs_spvcc(Spvc,SetCols);
+ {_,_,_} ->
+ set_attrs_spvpc(Spvc,SetCols)
+ end.
+
+set_attrs_spvcc(Spvc,[{2,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetAddress = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{3,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetSelectType = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{18,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetVpi = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{4,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetVpi = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{5,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetVci = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{6,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcLastReleaseCause = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{7,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcLastReleaseDiagnostic = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{10,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryInterval = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{11,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryTimer = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{12,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryThreshold = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{13,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryFailures = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{14,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryLimit = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{16,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetDlci = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[{17,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetType = Value},
+ set_attrs_spvcc(Spvc1,T);
+set_attrs_spvcc(Spvc,[_|T]) ->
+ set_attrs_spvcc(Spvc,T);
+set_attrs_spvcc(Spvc,[]) ->
+ debug_disabled,
+ Spvc.
+
+set_attrs_spvpc(Spvc,[{2,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetAddress = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{3,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetSelectType = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{15,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetVpi = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{4,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcTargetVpi = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{5,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcLastReleaseCause = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{6,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcLastReleaseDiagnostic = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{9,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryInterval = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{10,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryTimer = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{11,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryThreshold = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{12,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryFailures = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[{13,Value}|T]) ->
+ Spvc1 = Spvc#spvcObj{spvcRetryLimit = Value},
+ set_attrs_spvpc(Spvc1,T);
+set_attrs_spvpc(Spvc,[_|T]) ->
+ set_attrs_spvpc(Spvc,T);
+set_attrs_spvpc(Spvc,[]) ->
+ Spvc.
+
+call_failure(Spvc) ->
+ debug_disabled,
+ Key = case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ spvcLib:update_counter(callFailures,1,spvcLib:get_membership(node())),
+ atm_spvc;
+ _ ->
+ spvcLib:update_counter(callFrFailures,1,spvcLib:get_membership(node())),
+ fr_spvc
+ end,
+ Obj = spvcDataBase:db_read({spvcFailures,Key}),
+ case Obj#spvcFailures.spvcCallFailuresTrapEnable of
+ 1 ->
+ EventIndObj = spvcDataBase:db_read({spvcEventIndicator,Key}),
+ case EventIndObj#spvcEventIndicator.spvcTimerInd of
+ 1 ->
+ spvcDataBase:db_write(EventIndObj#spvcEventIndicator{spvcSendEventInd = 1}),
+ NI = Obj#spvcFailures.spvcNotificationInterval,
+ sysTimer:apply_after(1000 * NI,spvcOrig,timeout_event,[EventIndObj]);
+ _ ->
+ spvcManager:send_event(Key),
+ NI = Obj#spvcFailures.spvcNotificationInterval,
+ sysTimer:apply_after(1000 * NI,spvcManager,timeout,[Key]),
+ spvcDataBase:db_write(EventIndObj#spvcEventIndicator{spvcTimerInd = 1,
+ spvcSendEventInd = 2})
+ end;
+ _ ->
+ ok
+ end.
+
+timeout_event(EventIndObj) ->
+ spvcDataBase:db_write(EventIndObj#spvcEventIndicator{spvcTimerInd = 2}).
+
+check_limits(Spvc) ->
+ debug_disabled,
+ T = Spvc#spvcObj.spvcRetryThreshold,
+ L = Spvc#spvcObj.spvcRetryLimit,
+ F = Spvc#spvcObj.spvcRetryFailures,
+ I = Spvc#spvcObj.spvcRetryInterval,
+ {check_threshold(F,T),check_limit(F,L),check_interval(I)}.
+
+check_threshold(Failures,Threshold) when Failures == Threshold ->
+ debug_disabled,
+ retry_threshold;
+check_threshold(Failures,Threshold) ->
+ debug_disabled,
+ ok.
+
+check_limit(Failures,0) ->
+ debug_disabled,
+ ok;
+check_limit(Failures,Limit) when Failures < Limit ->
+ debug_disabled,
+ ok;
+check_limit(Failures,Limit) ->
+ debug_disabled,
+ retry_limit.
+
+check_interval(0) ->
+ no_retries;
+check_interval(I) ->
+ I.
+
+start_timer(wait,Spvc) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ Id = spvcReestablishTimer:apply_after(backoff_delay(Key),spvcServer,cast_to_spvc,[node(),spvcOrig,timeout,[wait,Key]]).
+
+timeout(wait,Key) ->
+ debug_disabled,
+ case spvcDataBase:db_read({spvcObj,Key}) of
+ [] ->
+ debug_disabled,
+ ok;
+ Spvc ->
+ case Spvc#spvcObj.currentState of
+ wait ->
+ IfIndex = element(1,Key),
+ case spvcOam:is_reassign_et_in_progress(IfIndex) of
+ true ->
+ ok;
+ _ ->
+ orig_state_machine(wait,timeout,Spvc,[])
+ end;
+ _ ->
+ ok
+ end
+ end;
+timeout(X,Y) ->
+ debug_disabled,
+ ok.
+
+clear(Spvc) ->
+ debug_disabled,
+ Key = Spvc#spvcObj.spvcEntry,
+ PchKey = case Key of
+ {IfIndex_Value,Vpi_Value,Vci_Value,_} ->
+ {IfIndex_Value,Vpi_Value,Vci_Value};
+ {IfIndex_Value,Vpi_Value,_} ->
+ {IfIndex_Value,Vpi_Value};
+ [IfIndex_Value,Vpi_Value,Vci_Value,_] ->
+ [IfIndex_Value,Vpi_Value,Vci_Value];
+ [IfIndex_Value,Vpi_Value,_] ->
+ [IfIndex_Value,Vpi_Value]
+ end,
+ spvcEndPoint:free_tp_spvc(PchKey),
+ spvcDataBase:db_delete({spvcObj,Key}),
+ update_state(Spvc,clear),
+ OrigKey = list_to_tuple([orig] ++ tuple_to_list(Key)),
+ case Spvc#spvcObj.currentState of
+ created ->
+ ok;
+ _ ->
+ case spvcDataBase:db_read({spvcTpToHcId,OrigKey}) of
+ [] ->
+ ok;
+ #spvcTpToHcId{hcId = HcId} ->
+ spvcDataBase:db_delete({spvcHcIdToTp,HcId})
+ end,
+ ets:delete(spvcTpToHcId,OrigKey),
+ spvcReestablishTimer:cancel(Key),
+ ets:delete(spvcBackoff,Spvc#spvcObj.spvcEntry)
+ end,
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ sccmManager:cast_to_sccm(spvcLib:get_cp(om_node),spvcEndPoint,remove_tp,[tuple_to_list(PchKey)]);
+ FrKey ->
+ spvcFr:clean_up(FrKey)
+ end,
+ case {Spvc#spvcObj.spvcRerCap,Spvc#spvcObj.spvcEntry} of
+ {false,_} ->
+ ok;
+ {true,Entry} when size(Entry) == 3 ->
+ spvcDataBase:db_delete({spvcRerVp,Entry});
+ {true,Entry} when size(Entry) == 4 ->
+ spvcDataBase:db_delete({spvcRerVc,Entry})
+ end.
+
+get_link_state(If) when integer(If) ->
+ debug_disabled,
+ cnhChi:get_link_opstate(If);
+get_link_state(Other) ->
+ debug_disabled,
+ disabled.
+
+get_link_state_intf(If,Msg) when integer(If) ->
+ debug_disabled,
+ case cnhChi:get_link_opstate(If) of
+ enabled ->
+ enabled;
+ _ ->
+ Om_Node = spvcLib:get_cp(om_node),
+ case rpc:call(Om_Node,intfI,get_link_op_state,[If]) of
+ {ok,enabled} ->
+ enabled;
+ Result ->
+ disabled
+ end
+ end;
+get_link_state_intf(Other,Msg) ->
+ debug_disabled,
+ disabled.
+
+setup(HcId,Setup,Spvc) ->
+ case spvcDataBase:db_read({spvcObj,Spvc#spvcObj.spvcEntry}) of
+ [] ->
+ ok;
+ Spvc1 ->
+ case Spvc#spvcObj.currentState == Spvc1#spvcObj.currentState of
+ true ->
+ spvcLib:increase_counter(spvcSaEtStat,Spvc),
+ case Spvc#spvcObj.spvcFrKey of
+ undefined ->
+ do_setup(HcId,Setup,Spvc#spvcObj.spvcRerCap);
+ FrKey ->
+ do_setup(HcId,Setup,FrKey)
+ end;
+ _ ->
+ ok
+ end
+ end.
+
+do_setup(HcId,Setup,Type) when Type == undefined; Type == false ->
+ debug_disabled,
+ ReturnData = {0,HcId},
+ L3Data = {0,[HcId,Setup]},
+ mdisp:msg(node(),{plcOperator,1,infinity},{HcId,{spvcI,ReturnData}},{ccI,l3_msg,[HcId,spvcI,L3Data]});
+do_setup(HcId,Setup,true) ->
+ debug_disabled,
+ ReturnData = {0,HcId},
+ L3Data = {0,[HcId,Setup]},
+ mdisp:msg(node(),{plcOperator,1,infinity},{HcId,{spvcRerI,ReturnData}},{ccI,l3_msg,[HcId,spvcRerI,L3Data]});
+do_setup(HcId,Setup,FrKey) ->
+ debug_disabled,
+ ReturnData = {0,HcId},
+ L3Data = {0,[HcId,Setup]},
+ mdisp:msg(node(),{plcOperator,1,infinity},{HcId,{spvcFrI,ReturnData}},{ccI,l3_msg,[HcId,spvcFrI,L3Data]}).
+
+backoff_delay(Key) ->
+ debug_disabled,
+ Obj = spvcDataBase:db_read({spvcObj,Key}),
+ Var = spvcDataBase:db_read({spvcFailures,atm_spvc}),
+ {Delay,Flag} = case Obj#spvcObj.spvcRetryFailures of
+ 0 ->
+ {100,no_alarm};
+ 1 ->
+ {Obj#spvcObj.spvcRetryInterval,no_alarm};
+ _ ->
+ Table = get_backoff_table(Key,Obj),
+ Max_Delay = Var#spvcFailures.max_delay,
+ case Var#spvcFailures.delay_factor * Table#spvcBackoff.delay_time of
+ DelayValue when DelayValue < Max_Delay ->
+ {DelayValue,no_alarm};
+ _ ->
+ Org_Retry_Interval = Obj#spvcObj.spvcRetryInterval,
+ if
+ Org_Retry_Interval < Max_Delay ->
+ spvcLib:send_spvcStillTryingAlarm(Key,Table#spvcBackoff.flag),
+ {Max_Delay,alarm};
+ true ->
+ spvcLib:send_spvcStillTryingAlarm(Key,Table#spvcBackoff.flag),
+ {Org_Retry_Interval,alarm}
+ end
+ end
+ end,
+ ets:insert(spvcBackoff,#spvcBackoff{key = Key,
+ delay_time = Delay,
+ flag = Flag}),
+ round(Delay).
+
+get_backoff_table(Index,Spvc) ->
+ case ets:lookup(spvcBackoff,Index) of
+ [Obj] ->
+ Obj;
+ _ ->
+ #spvcBackoff{key = Spvc#spvcObj.spvcEntry,
+ delay_time = Spvc#spvcObj.spvcRetryInterval,
+ flag = no_alarm}
+ end.
+
+set_fr_atm_iw_admin_state(FrEndPoint,up,Spvc) ->
+ ok;
+set_fr_atm_iw_admin_state(FrEndPoint,NewStatus,Spvc) ->
+ ok.
+
+forced_release(FrEndPoint) ->
+ FrPerm = spvcDataBase:db_read({spvcFr,FrEndPoint}),
+ case FrPerm of
+ [] ->
+ {error,no_fr_spvc};
+ _ ->
+ Key = FrPerm#spvcFr.spvcFrAtmEntry,
+ Spvc = spvcDataBase:db_read({spvcObj,Key}),
+ SpvcFrObj = spvcDataBase:db_read({spvcFrPerm,FrEndPoint}),
+ case SpvcFrObj#spvcFrPerm.spvcFrConnect of
+ 3 ->
+ SpvcTpToHcId = read_spvcTpToHcId(Key),
+ Release = spvcEncode:encode_cc_release(31),
+ spvcManager:release_un(b_side,SpvcTpToHcId#spvcTpToHcId.hcId,Release,Spvc);
+ _ ->
+ {error,target_not_owned_by_this_connection}
+ end
+ end.
+
+
+
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/wdp.hrl b/lib/dialyzer/test/user_tests_SUITE_data/src/wdp.hrl
new file mode 100644
index 0000000000..fa0e8af8c7
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/wdp.hrl
@@ -0,0 +1,97 @@
+
+%%
+%% WAP Port Number Definitions (WDP Appendix B.)
+%%
+
+-define(WAP_PORT_WTA_CL_SEC, 2805).
+-define(WAP_PORT_WTA_CO_SEC, 2923).
+-define(WAP_PORT_PUSH_CL, 2948).
+-define(WAP_PORT_PUSH_CL_SEC, 2949).
+
+-define(WAP_PORT_CL, 9200).
+-define(WAP_PORT_CO, 9201).
+-define(WAP_PORT_CL_SEC, 9202).
+-define(WAP_PORT_CO_SEC, 9203).
+-define(WAP_PORT_VCARD, 9204).
+-define(WAP_PORT_VCAL, 9205).
+-define(WAP_PORT_VCARD_SEC, 9206).
+-define(WAP_PORT_VCAL_SEC, 9207).
+
+-define(WAP_PORT_RINGTONE, 5505).
+-define(WAP_PORT_OPER_LOGO, 5506).
+-define(WAP_PORT_CLI_LOGO, 5507).
+
+%%
+%% WDP Bearer Type Assignments (WDP Appendix C.)
+%%
+
+%%
+%% Names after the WAP_BEARER_ tag follow the pattern [network]_[bearer_type]_[address_type]
+%%
+-define(WAP_BEARER_ANY_ANY_IPV4, 16#00).
+-define(WAP_BEARER_ANY_ANY_IPV6, 16#01).
+-define(WAP_BEARER_GSM_USSD_ANY, 16#02).
+-define(WAP_BEARER_GSM_SMS_GSMMSISDN, 16#03).
+-define(WAP_BEARER_ANSI136_GUTS_ANSI136MSISDN, 16#04).
+-define(WAP_BEARER_IS95CDMA_SMS_IS637MSISDN, 16#05).
+-define(WAP_BEARER_IS95CDMA_CSD_IPV4, 16#06).
+-define(WAP_BEARER_IS95CDMA_PACKETDATA_IPV4, 16#07).
+-define(WAP_BEARER_ANSI136_CSD_IPV4, 16#08).
+-define(WAP_BEARER_ANSI136_PACKETDATA_IPV4, 16#09).
+-define(WAP_BEARER_GSM_CSD_IPV4, 16#0a).
+-define(WAP_BEARER_GSM_GPRS_IPV4, 16#0b).
+-define(WAP_BEARER_GSM_USSD_IPV4, 16#0c).
+-define(WAP_BEARER_AMPS_CDPD_IPV4, 16#0d).
+-define(WAP_BEARER_PDC_CSD_IPV4, 16#0e).
+-define(WAP_BEARER_PDC_PACKETDATA_IPV4, 16#0f).
+-define(WAP_BEARER_IDEN_SMS_IDENMSISDN, 16#10).
+-define(WAP_BEARER_IDEN_CSD_IPV4, 16#11).
+-define(WAP_BEARER_IDEN_PACKETDATA_IPV4, 16#12).
+-define(WAP_BEARER_PAGINGNETWORK_FLEX_FLEXMSISDN, 16#13).
+-define(WAP_BEARER_PHS_SMS_PHSMSISDN, 16#14).
+-define(WAP_BEARER_PHS_CSD_IPV4, 16#15).
+-define(WAP_BEARER_GSM_USSD_GSMSERVICECODE, 16#16).
+-define(WAP_BEARER_TETRA_SDS_TETRAITSI, 16#17).
+-define(WAP_BEARER_TETRA_SDS_TETRAMSISDN, 16#18).
+-define(WAP_BEARER_TETRA_PACKETDATA_IPV4, 16#19).
+-define(WAP_BEARER_PAGINGNETWORK_REFLEX_REFLEXMSISDN, 16#1a).
+-define(WAP_BEARER_GSM_USSD_GSMMSISDN, 16#1b).
+-define(WAP_BEARER_MOBITEX_MPAK_MAN, 16#1c).
+-define(WAP_BEARER_ANSI136_GHOST_GSMMSISDN, 16#1d).
+
+-record(wdp_address,
+ {
+ bearer,
+ address,
+ portnum
+ }).
+
+-record(wdp_sap_info,
+ {
+ mtu, %% max transmission unit (bytes)
+ mru %% max receive unit (bytes)
+ }).
+
+%%
+%% Source and destination address are wdp_addresses
+%%
+-record(wdp_socket_pair,
+ {
+ source,
+ destination
+ }).
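The comment above notes that source and destination are wdp_addresses. Purely as an illustrative sketch (not part of this header or of the test data), a socket pair could be populated as follows in a module that includes wdp.hrl; the bearer, IPv4 tuples, and port choices are made-up values:

    %% Hypothetical example values; any bearer/address/port combination
    %% from the definitions above would do equally well.
    SocketPair = #wdp_socket_pair{
                   source      = #wdp_address{bearer  = ?WAP_BEARER_GSM_GPRS_IPV4,
                                              address = {10,0,0,1},
                                              portnum = ?WAP_PORT_CL},
                   destination = #wdp_address{bearer  = ?WAP_BEARER_GSM_GPRS_IPV4,
                                              address = {10,0,0,2},
                                              portnum = ?WAP_PORT_PUSH_CL}}.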
+
+-record(wdp_local_port,
+ {
+ port, %% wdp "socket"
+ sap, %% source address
+ user, %% WDP user process
+ monitor %% monitor on WDP user
+ }).
+
+-record(wdp_local_sap,
+ {
+ sap, %% source address
+ port %% wdp "socket"
+ }).
+
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/wsp.hrl b/lib/dialyzer/test/user_tests_SUITE_data/src/wsp.hrl
new file mode 100644
index 0000000000..8190bd6f6f
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/wsp.hrl
@@ -0,0 +1,242 @@
+
+%% WSP Table 34. PDU Type Assignments
+%%
+
+-define(WSP_Connect, 16#01).
+-define(WSP_ConnectReply, 16#02).
+-define(WSP_Redirect, 16#03).
+-define(WSP_Reply, 16#04).
+-define(WSP_Disconnect, 16#05).
+-define(WSP_Push, 16#06).
+-define(WSP_ConfirmedPush, 16#07).
+-define(WSP_Suspend, 16#08).
+-define(WSP_Resume, 16#09).
+
+-define(WSP_Get, 16#40).
+-define(WSP_Options, 16#41).
+-define(WSP_Head, 16#42).
+-define(WSP_Delete, 16#43).
+-define(WSP_Trace, 16#44).
+
+-define(WSP_Post, 16#60).
+-define(WSP_Put, 16#61).
+
+-define(WSP_DataFragmentPDU, 16#80).
+
+%%
+%% WSP Table 37. Capability Assignments
+%%
+
+-define(WSP_CAP_CLIENT_SDU_SIZE, 16#00).
+-define(WSP_CAP_SERVER_SDU_SIZE, 16#01).
+-define(WSP_CAP_PROTOCOL_OPTIONS, 16#02).
+-define(WSP_CAP_METHOD_MOR, 16#03).
+-define(WSP_CAP_PUSH_MOR, 16#04).
+-define(WSP_CAP_EXTENDED_METHODS, 16#05).
+-define(WSP_CAP_HEADER_CODE_PAGES, 16#06).
+-define(WSP_CAP_ALIASES, 16#07).
+-define(WSP_CAP_CLIENT_MESSAGE_SIZE, 16#08).
+-define(WSP_CAP_SERVER_MESSAGE_SIZE, 16#09).
+
+-define(WSP_CODEPAGE_1, 1).
+-define(WSP_DEFAULT_CODEPAGE, ?WSP_CODEPAGE_1).
+
+-define(ANY_LANGUAGE,128).
+
+-define(WSP_10, {1,0}).
+-define(WSP_11, {1,1}).
+-define(WSP_12, {1,2}).
+-define(WSP_13, {1,3}).
+-define(WSP_14, {1,4}).
+-define(WSP_15, {1,5}).
+
+-define(WSP_COMPLIENT_VERSION, ?WSP_15).
+-define(WSP_DEFAULT_VERSION, ?WSP_12).
+
+-define(WSP_STATUS_CONTINUE, 100).
+-define(WSP_STATUS_SWITCHING_PROTOCOLS, 101).
+-define(WSP_STATUS_OK, 200).
+-define(WSP_STATUS_CREATED, 201).
+-define(WSP_STATUS_ACCEPTED, 202).
+-define(WSP_STATUS_NON_AUTHORITATIVE_INFORMATION, 203).
+-define(WSP_STATUS_NO_CONTENT, 204).
+-define(WSP_STATUS_RESET_CONTENT, 205).
+-define(WSP_STATUS_PARTIAL_CONTENT, 206).
+-define(WSP_STATUS_MULTIPLE_CHOICES, 300).
+-define(WSP_STATUS_MOVED_PERMANENTLY, 301).
+-define(WSP_STATUS_MOVED_TEMPORARILY, 302).
+-define(WSP_STATUS_SEE_OTHER, 303).
+-define(WSP_STATUS_NOT_MODIFIED, 304).
+-define(WSP_STATUS_USE_PROXY, 305).
+-define(WSP_STATUS_RESERVED, 306).
+-define(WSP_STATUS_TEMPORARY_REDIRECT, 307).
+-define(WSP_STATUS_BAD_REQUEST, 400).
+-define(WSP_STATUS_UNAUTHORIZED, 401).
+-define(WSP_STATUS_PAYMENT_REQUIRED, 402).
+-define(WSP_STATUS_FORBIDDEN, 403).
+-define(WSP_STATUS_NOT_FOUND, 404).
+-define(WSP_STATUS_METHOD_NOT_ALLOWED, 405).
+-define(WSP_STATUS_NOT_ACCEPTABLE, 406).
+-define(WSP_STATUS_PROXY_AUTHENTICATION_REQUIRED, 407).
+-define(WSP_STATUS_REQUEST_TIMEOUT, 408).
+-define(WSP_STATUS_CONFLICT, 409).
+-define(WSP_STATUS_GONE, 410).
+-define(WSP_STATUS_LENGTH_REQUIRED, 411).
+-define(WSP_STATUS_PRECONDITION_FAILED, 412).
+-define(WSP_STATUS_REQUEST_ENTITY_TOO_LARGE, 413).
+-define(WSP_STATUS_REQUEST_URI_TOO_LARGE, 414).
+-define(WSP_STATUS_UNSUPPORTED_MEDIA_TYPE, 415).
+-define(WSP_STATUS_REQUESTED_RANGE_NOT_SATISFIABLE, 416).
+-define(WSP_STATUS_EXPECTATION_FAILED, 417).
+-define(WSP_STATUS_INTERNAL_SERVER_ERROR, 500).
+-define(WSP_STATUS_NOT_IMPLEMENTED, 501).
+-define(WSP_STATUS_BAD_GATEWAY, 502).
+-define(WSP_STATUS_SERVICE_UNAVAILABLE, 503).
+-define(WSP_STATUS_GATEWAY_TIMEOUT, 504).
+-define(WSP_STATUS_HTTP_VERSION_NOT_SUPPORTED, 505).
+
+-define(ENCODE_SHORT(X), <<1:1, (X):7>>).
+
+-define(ENCODE_LONG(X),
+ if (X) =< 16#ff -> <<1, (X):8>>;
+ (X) =< 16#ffff -> <<2, (X):16>>;
+ (X) =< 16#ffffff -> <<3, (X):24>>;
+ (X) =< 16#ffffffff -> <<4, (X):32>>;
+ true -> encode_long1(X)
+ end).
+
+
+-record(wsp_session,
+ {
+ id, %% uniq session id
+ ref, %% address quadruple (socketpair)
+ state=null, %% connected, suspended
+ version, %% encoding version to use
+ capabilities, %% client capabilities
+ headers %% client hop-by-hop headers!!!
+ }).
+
+-record(wsp_header,
+ {
+ name, %% field name
+ value, %% field value (binary value)
+ params=[] %% field params [{Name,Value} | Value]
+ }).
+
+-record(wsp_multipart_entry,
+ {
+ content_type, %% #wsp_header
+ headers=[],
+ data=(<<>>)
+ }).
+
+-record(wsp_capabilities,
+ {
+ aliases=[], %% [#wdp_address]
+ client_sdu_size=1400,
+ extended_methods=[], %% [{PduType, Name}]
+ header_code_pages=[], %% [{Page,Name}] | [Page]
+ protocol_options=[], %% [push,confirmed_push,resume,
+ %% acknowledgement_headers]
+ method_mor = 10, %% 1?
+ push_mor = 10, %% 1?
+ server_sdu_size=1400,
+ client_message_size,
+ server_message_size,
+ unknown=[]
+ }).
+
+%% WSP PDU records
+
+-record(wsp_connect,
+ {
+ version, %% protocol version, not wsp version?
+ capabilities,
+ headers
+ }).
+
+-record(wsp_connect_reply,
+ {
+ server_session_id,
+ capabilities,
+ headers=[]
+ }).
+
+-define(WSP_PERMANENT_REDIRECT, 16#80).
+-define(WSP_REUSE_SECURITY, 16#40).
+
+-record(wsp_redirect,
+ {
+ flags=[],
+ addresses=[]
+ }).
+
+-record(wsp_disconnect,
+ {
+ server_session_id
+ }).
+
+-record(wsp_get,
+ {
+ type,
+ uri,
+ headers=[]
+ }).
+
+-record(wsp_post,
+ {
+ type,
+ uri,
+ content_type, %% #wsp_header
+ headers=[],
+ data
+ }).
+
+-record(wsp_reply,
+ {
+ status,
+ content_type, %% #wsp_header
+ headers=[],
+ data
+ }).
+
+-record(wsp_data_fragment_pdu,
+ {
+ headers=[],
+ data
+ }).
+
+-record(wsp_push,
+ {
+ type = push,
+ content_type, %% #wsp_header
+ headers=[],
+ data
+ }).
+
+-record(wsp_suspend,
+ {
+ session_id
+ }).
+
+-record(wsp_resume,
+ {
+ session_id,
+ capabilities,
+ headers
+ }).
+
+%% NOTE: not a real pdu
+-record(wsp_acknowledgement_headers,
+ {
+ headers=[]
+ }).
+
+-record(wsp_unknown_pdu,
+ {
+ type, %% integer
+ data %% the payload
+ }).
+
+
+
diff --git a/lib/dialyzer/test/user_tests_SUITE_data/src/wsp_pdu.erl b/lib/dialyzer/test/user_tests_SUITE_data/src/wsp_pdu.erl
new file mode 100644
index 0000000000..596a2f63ac
--- /dev/null
+++ b/lib/dialyzer/test/user_tests_SUITE_data/src/wsp_pdu.erl
@@ -0,0 +1,5423 @@
+%%%=======================================================================
+%%% File : wsp_pdu.erl
+%%% Author : Tony Rogvall <[email protected]>
+%%% Description : WSP PDU
+%%% Created : 18 Aug 2003 by <[email protected]>
+%%%=======================================================================
+%%%
+%%% There are a couple of bugs in this file. Some are detected by
+%%% Dialyzer v1.1 starting both from byte code and from source, some
+%%% others are detected only when starting from source, while some
+%%% others go unnoticed (these are identified by "BUG" below). It is
+%%% expected that at least some of them are detected when the new type
+%%% analysis is integrated into Dialyzer. Some others, like the
+%%% one with the unused _Acc argument, are harder to detect and might
+%%% require different techniques.
+%%%
+%%%=======================================================================
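Since the note above distinguishes analyses started from byte code and from source, here is a minimal, hypothetical sketch of how either run could be driven through Dialyzer's Erlang interface; the file names, the include directory, and the reliance on the default PLT are assumptions for illustration, not something this test suite itself does:

    %% Analyse the same module from source and from compiled byte code;
    %% dialyzer:run/1 returns the warnings as a list of terms.
    SrcWarns = dialyzer:run([{files, ["wsp_pdu.erl"]}, {from, src_code},
                             {include_dirs, ["."]}]),
    BinWarns = dialyzer:run([{files, ["wsp_pdu.beam"]}, {from, byte_code}]),
    %% Render the warnings in the usual textual form.
    [io:format("~s", [dialyzer:format_warning(W)]) || W <- SrcWarns ++ BinWarns].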
+
+-module(wsp_pdu).
+-export([encode/1, encode/2, decode/1, decode/2]).
+
+%% The following is just to suppress unused function warnings
+-export([decode_address/1, decode_header/2,
+ decode_headers/1, decode_mms_version/1, decode_multipart/1,
+ encode_headers/1, encode_mms_version/1, encode_multipart/1,
+ encode_language/1, encode_short_integer/1,
+ fmt_current_date/0,
+ format_header/1, format_headers/1,
+ parse_header/1, format/1]).
+
+-include("wsp.hrl").
+-include("wdp.hrl").
+
+-ifdef(debug).
+-define(dbg(Fmt,Args), io:format(Fmt, Args)).
+-else.
+-define(dbg(Fmt,Args), ok).
+-endif.
+
+-define(WARN(Cond, Message),
+ if (Cond) ->
+ io:format("Warning: ~s\n", [(Message)]);
+ true ->
+ ok
+ end).
+
+
+format(Pdu) ->
+ if record(Pdu, wsp_connect) ->
+ fmt(Pdu, record_info(fields, wsp_connect));
+ record(Pdu, wsp_connect_reply) ->
+ fmt(Pdu, record_info(fields, wsp_connect_reply));
+ record(Pdu, wsp_redirect) ->
+ fmt(Pdu, record_info(fields, wsp_redirect));
+ record(Pdu, wsp_disconnect) ->
+ fmt(Pdu, record_info(fields, wsp_disconnect));
+ record(Pdu, wsp_get) ->
+ fmt(Pdu, record_info(fields, wsp_get));
+ record(Pdu, wsp_post) ->
+ fmt(Pdu, record_info(fields, wsp_post));
+ record(Pdu,wsp_reply) ->
+ fmt(Pdu, record_info(fields, wsp_reply));
+ record(Pdu,wsp_data_fragment_pdu) ->
+ fmt(Pdu, record_info(fields, wsp_data_fragment_pdu));
+ record(Pdu,wsp_push) ->
+ fmt(Pdu, record_info(fields, wsp_push));
+ record(Pdu, wsp_suspend) ->
+ fmt(Pdu, record_info(fields, wsp_suspend));
+ record(Pdu, wsp_resume) ->
+ fmt(Pdu, record_info(fields, wsp_resume));
+ record(Pdu, wsp_unknown_pdu) ->
+ fmt(Pdu, record_info(fields, wsp_unknown_pdu))
+ end.
+
+fmt(Pdu, Fs) ->
+ [Name | Vs] = tuple_to_list(Pdu),
+ lists:flatten(["\n",atom_to_list(Name)," {\n" , fmt1(Fs, Vs), "\n}"]).
+
+fmt1([F|Fs],[V|Vs]) ->
+ [io_lib:format(" ~s: ~s;\n", [F,fmt_value(V)]) | fmt1(Fs, Vs)];
+fmt1([], []) ->
+ "".
+
+fmt_value(V) when binary(V) -> "#Bin";
+fmt_value(V) -> lists:flatten(io_lib:format("~p",[V])).
+
+
+%%
+%% Wsp pdu encoder
+%%
+encode(Pdu) ->
+ encode(Pdu, ?WSP_DEFAULT_VERSION).
+
+encode(Pdu, Version) ->
+ ?dbg("encode pdu using encoding version ~p\n", [Version]),
+ Enc = encode1(Pdu, Version),
+ ?dbg("pdu: ~p\nreversed pdu: ~p\n",
+ [Pdu, decode(Enc, Version)]),
+ Enc.
+
+
+encode1(Pdu, Version) ->
+ case Pdu of
+ #wsp_connect_reply {server_session_id=ServerSessionId,
+ capabilities=Capabilities,
+ headers=Headers} ->
+ EncServerSessionId = e_uintvar(ServerSessionId),
+ EncCapabilities = encode_capabilities(Capabilities),
+ EncCapabilitiesLength = e_uintvar(size(EncCapabilities)),
+ EncHeaders = encode_headers(Headers,Version),
+ EncHeadersLength = e_uintvar(size(EncHeaders)),
+ <<?WSP_ConnectReply,
+ EncServerSessionId/binary,
+ EncCapabilitiesLength/binary, EncHeadersLength/binary,
+ EncCapabilities/binary, EncHeaders/binary>>;
+
+ #wsp_reply{ status=Status,
+ content_type=ContentType,
+ headers=Headers,
+ data=Data} ->
+ EncStatus = encode_status_code(Status),
+ EncContentType = encode_content_type(ContentType,Version),
+ EncHeaders = encode_headers(Headers,Version),
+ EncHeadersLength = e_uintvar(size(EncContentType)+
+ size(EncHeaders)),
+ <<?WSP_Reply,
+ EncStatus:8,
+ EncHeadersLength/binary,
+ EncContentType/binary,
+ EncHeaders/binary,
+ Data/binary>>;
+
+ #wsp_post{type=Type, uri=URI, content_type=ContentType,
+ headers=Headers, data=Data} ->
+ %% WSP_Post, WSP_Put
+ PDUType = encode_pdu_type(Type),
+ UriLength = e_uintvar(length(URI)),
+ EncContentType = encode_content_type(ContentType,Version),
+ EncHeaders = encode_headers(Headers,Version),
+ EncHeadersLength = e_uintvar(size(EncContentType)+
+ size(EncHeaders)),
+ %% FIXME
+ <<PDUType:8,
+ UriLength/binary,
+ EncHeadersLength/binary,
+ (list_to_binary(URI))/binary,
+ EncContentType/binary,
+ EncHeaders/binary,
+ Data/binary>>;
+
+ #wsp_push{type=Type, content_type=ContentType,
+ headers=Headers, data=Data} ->
+ %% WSP_Push, WSP_ConfirmedPush
+ PDUType = encode_pdu_type(Type),
+ EncContentType = encode_content_type(ContentType,Version),
+ EncHeaders = encode_headers(Headers,Version),
+ ?dbg("Version ~p Headers ~p", [Version, Headers]),
+ ?dbg("EncHeaders ~p", [EncHeaders]),
+ EncHeadersLength = e_uintvar(size(EncContentType)+
+ size(EncHeaders)),
+ ?dbg("EncCT = ~w ~w", [ContentType, EncContentType]),
+ ?dbg("EncHL = ~w", [EncHeadersLength]),
+ <<PDUType:8,
+ EncHeadersLength/binary,
+ EncContentType/binary,
+ EncHeaders/binary,
+ Data/binary>>;
+
+ #wsp_get{type=Type, uri=URI, headers=Headers} ->
+ %% WSP_Get, WSP_Options, WSP_Head, WSP_Delete, WSP_Trace
+ PDUType = encode_pdu_type(Type),
+ UriLength = length(URI),
+ EncHeaders = encode_headers(Headers,Version),
+ <<PDUType:8,
+ (e_uintvar(UriLength))/binary,
+ (list_to_binary(URI))/binary,
+ EncHeaders/binary>>;
+
+ #wsp_redirect { flags = Flags, addresses = Addrs } ->
+ Flg = lists:foldl(fun(permanent,F) ->
+ ?WSP_PERMANENT_REDIRECT bor F;
+ (resue, F) ->
+ ?WSP_REUSE_SECURITY bor F
+ end, 0, Flags),
+ EncAddr = encode_addresses(Addrs),
+ <<?WSP_Redirect, Flg:8, EncAddr/binary >>;
+
+
+ #wsp_data_fragment_pdu { headers=Headers, data=Data } ->
+ EncHeaders = encode_headers(Headers,Version),
+ << ?WSP_DataFragmentPDU, EncHeaders/binary, Data/binary >>
+ end.
+
+decode(Data) ->
+ decode(Data, ?WSP_COMPLIENT_VERSION).
+
+decode(Data0, Version) ->
+ case Data0 of
+ <<?WSP_Connect:8,PduVersion:8,D0/binary>> ->
+ %% 8.2.2.1
+ {CapabilitiesLen,D1} = d_uintvar(D0),
+ {HeadersLen,D2} = d_uintvar(D1),
+ {Capabilities,D3} = split_binary(D2, CapabilitiesLen),
+ Caps = decode_capabilities(Capabilities,#wsp_capabilities{}),
+ {Headers,D4} = split_binary(D3, HeadersLen),
+ DecHeaders = decode_headers(Headers, Version),
+ ?WARN(D4 =/= <<>>, "Connect pdu contains trailing data"),
+ %% FIXME: warn when D4 is not <<>>
+ #wsp_connect{ version = PduVersion,
+ capabilities=Caps,
+ headers = DecHeaders };
+
+ <<?WSP_ConnectReply:8,D0/binary>> ->
+ %% 8.2.2.2
+ {ServerSessionId,D1} = d_uintvar(D0),
+ {CapabilitiesLen,D2} = d_uintvar(D1),
+ {HeadersLen,D3} = d_uintvar(D2),
+ {Capabilities,D4} = split_binary(D3, CapabilitiesLen),
+ Caps = decode_capabilities(Capabilities,#wsp_capabilities{}),
+ {Headers,D5} = split_binary(D4, HeadersLen),
+ DecHeaders = decode_headers(Headers, Version),
+ ?WARN(D5 =/= <<>>, "ConnectReply pdu contains trailing data"),
+ #wsp_connect_reply{server_session_id=ServerSessionId,
+ capabilities=Caps,
+ headers=DecHeaders};
+
+ <<?WSP_Redirect:8,Flg:8,D0/binary>> ->
+ Flags =
+ if Flg band ?WSP_PERMANENT_REDIRECT =/= 0 -> [permanent];
+ true -> []
+ end ++
+ if Flg band ?WSP_REUSE_SECURITY =/= 0 -> [security];
+ true -> []
+ end,
+ Addrs = decode_addresses(D0),
+ %% 8.2.2.3 Redirect
+ #wsp_redirect{flags=Flags,addresses=Addrs};
+
+
+ <<?WSP_Disconnect:8,D0/binary>> ->
+ %% 8.2.2.4 Disconnect
+ {ServerSessionId,_D1} = d_uintvar(D0),
+ #wsp_disconnect{server_session_id=ServerSessionId};
+
+ <<?WSP_Get:8,D0/binary>> ->
+ {URILength, D1} = d_uintvar(D0),
+ <<UriData:URILength/binary,D2/binary>> = D1,
+ Hs = decode_headers(D2, Version),
+ #wsp_get{type='GET',uri=binary_to_list(UriData),headers=Hs };
+
+ <<?WSP_Options:8,D0/binary>> ->
+ {URILength, D1} = d_uintvar(D0),
+ <<UriData:URILength/binary,D2/binary>> = D1,
+ Hs = decode_headers(D2, Version),
+ #wsp_get{type='OPTIONS',uri=binary_to_list(UriData),headers=Hs };
+
+ <<?WSP_Head:8,D0/binary>> ->
+ {URILength, D1} = d_uintvar(D0),
+ <<UriData:URILength/binary,D2/binary>> = D1,
+ Hs = decode_headers(D2, Version),
+ #wsp_get{type='HEAD',uri=binary_to_list(UriData),headers=Hs };
+
+ <<?WSP_Delete:8,D0/binary>> ->
+ {URILength, D1} = d_uintvar(D0),
+ <<UriData:URILength/binary,D2/binary>> = D1,
+ Hs = decode_headers(D2, Version),
+ #wsp_get{type='DELETE',uri=binary_to_list(UriData),headers=Hs };
+
+ <<?WSP_Trace:8,D0/binary>> ->
+ {URILength, D1} = d_uintvar(D0),
+ <<UriData:URILength/binary,D2/binary>> = D1,
+ Hs = decode_headers(D2, Version),
+ #wsp_get{type='TRACE',uri=binary_to_list(UriData),headers=Hs };
+
+ %% 8.2.3.2 Post
+ <<?WSP_Post:8,D0/binary>> ->
+ {URILen, D1} = d_uintvar(D0),
+ {HL0, D2} = d_uintvar(D1),
+ <<UriData:URILen/binary,D3/binary>> = D2,
+ {FieldData,D4} = scan_header_data(D3),
+ HL1 = (HL0-(size(D3)-size(D4))),
+ <<D5:HL1/binary,Data/binary>> = D4,
+ ContentType = decode_content_type(FieldData, Version),
+ Headers = decode_headers(D5, Version),
+ #wsp_post{ type='POST', uri=binary_to_list(UriData),
+ content_type=ContentType, headers=Headers, data=Data};
+
+ <<?WSP_Put:8,D0/binary>> ->
+ {URILen, D1} = d_uintvar(D0),
+ {HL0, D2} = d_uintvar(D1),
+ <<UriData:URILen/binary,D3/binary>> = D2,
+ {FieldData,D4} = scan_header_data(D3),
+ HL1 = (HL0-(size(D3)-size(D4))),
+ <<D5:HL1/binary,Data/binary>> = D4,
+ ContentType = decode_content_type(FieldData, Version),
+ Headers = decode_headers(D5, Version),
+ #wsp_post{ type='PUT', uri=binary_to_list(UriData),
+ content_type=ContentType, headers=Headers, data=Data};
+
+ <<?WSP_Reply:8,StatusCode:8,D0/binary>> ->
+ %% 8.2.3.3 Reply
+ Status = decode_status_code(StatusCode),
+ {HL0, D1} = d_uintvar(D0),
+ {FieldData, D2} = scan_header_data(D1),
+ ContentType = decode_content_type(FieldData, Version),
+ %% Headers are headersLength - binary size of content type
+ HL1 = (HL0-(size(D1)-size(D2))),
+ <<D3:HL1/binary,Data/binary>> = D2,
+ Hs = decode_headers(D3, Version),
+ #wsp_reply{status=Status, content_type=ContentType,
+ headers=Hs, data=Data};
+
+ <<?WSP_DataFragmentPDU:8,D0/binary>> ->
+ %% 8.2.3.4 Data Fragment PDU
+ {HL0, D1} = d_uintvar(D0),
+ <<D2:HL0/binary,Data/binary>> = D1,
+ Hs = decode_headers(D2, Version),
+ #wsp_data_fragment_pdu{headers=Hs, data=Data};
+
+ %% 8.2.4.1 Push or ConfirmedPush
+ <<?WSP_Push:8,D0/binary>> ->
+ {HeadersLength, T200} = d_uintvar(D0),
+ {FieldData, T300} = scan_header_data(T200),
+ ContentType = decode_content_type(FieldData, Version),
+ RealHeadersLength = (HeadersLength-(size(T200)-size(T300))),
+ <<T400:RealHeadersLength/binary,Data/binary>> = T300,
+ Headers = decode_headers(T400, Version),
+ #wsp_push{type=push,content_type=ContentType,
+ headers=Headers,data=Data};
+
+ <<?WSP_ConfirmedPush:8,D0/binary>> ->
+ {HeadersLength, T200} = d_uintvar(D0),
+ {FieldData, T300} = scan_header_data(T200),
+ ContentType = decode_content_type(FieldData, Version),
+ RealHeadersLength = (HeadersLength-(size(T200)-size(T300))),
+ <<T400:RealHeadersLength/binary,Data/binary>> = T300,
+ Headers = decode_headers(T400, Version),
+ #wsp_push{type=confirmed_push,
+ content_type=ContentType,
+ headers=Headers,data=Data};
+
+ <<PDUType:8,T100/binary>> ->
+ #wsp_unknown_pdu { type = PDUType, data = T100 }
+ end.
+
+
+encode_pdu_type(connect) -> ?WSP_Connect;
+encode_pdu_type(connect_reply) -> ?WSP_ConnectReply;
+encode_pdu_type(redirect) -> ?WSP_Redirect;
+encode_pdu_type(reply) -> ?WSP_Reply;
+encode_pdu_type(disconnect) -> ?WSP_Disconnect;
+encode_pdu_type(push) -> ?WSP_Push;
+encode_pdu_type(confirmed_push) -> ?WSP_ConfirmedPush;
+encode_pdu_type(suspend) -> ?WSP_Suspend;
+encode_pdu_type(resume) -> ?WSP_Resume;
+encode_pdu_type(data_fragment_pdu) -> ?WSP_DataFragmentPDU;
+encode_pdu_type('GET') -> ?WSP_Get;
+encode_pdu_type('OPTIONS') -> ?WSP_Options;
+encode_pdu_type('HEAD') -> ?WSP_Head;
+encode_pdu_type('DELETE') -> ?WSP_Delete;
+encode_pdu_type('TRACE') -> ?WSP_Trace;
+encode_pdu_type('POST') -> ?WSP_Post;
+encode_pdu_type('PUT') -> ?WSP_Put;
+encode_pdu_type(Type) when integer(Type) -> Type.
+
+
+decode_pdu_type(?WSP_Connect) -> connect;
+decode_pdu_type(?WSP_ConnectReply) -> connect_reply;
+decode_pdu_type(?WSP_Redirect) -> redirect;
+decode_pdu_type(?WSP_Reply) -> reply;
+decode_pdu_type(?WSP_Disconnect) -> disconnect;
+decode_pdu_type(?WSP_Push) -> push;
+decode_pdu_type(?WSP_ConfirmedPush) -> confirmed_push;
+decode_pdu_type(?WSP_Suspend) -> suspend;
+decode_pdu_type(?WSP_Resume) -> resume;
+decode_pdu_type(?WSP_DataFragmentPDU) -> data_fragment_pdu;
+decode_pdu_type(?WSP_Get) -> 'GET';
+decode_pdu_type(?WSP_Options) -> 'OPTIONS';
+decode_pdu_type(?WSP_Head) -> 'HEAD';
+decode_pdu_type(?WSP_Delete) -> 'DELETE';
+decode_pdu_type(?WSP_Trace) -> 'TRACE';
+decode_pdu_type(?WSP_Post) -> 'POST';
+decode_pdu_type(?WSP_Put) -> 'PUT';
+decode_pdu_type(Type) -> Type. %% allow unknown pdu types.
+
+
+%% Convert various data types to list
+
+to_list(I) when integer(I) ->
+ integer_to_list(I);
+to_list(A) when atom(A) ->
+ atom_to_list(A);
+to_list(Version={X,Y}) when integer(X), integer(Y) ->
+ format_version(Version);
+to_list(DateTime={{_,_,_},{_,_,_}}) ->
+ fmt_date(DateTime);
+to_list(L) when list(L) ->
+ L.
+
+
+
+encode_capabilities(Capa) ->
+ encode_capabilities(Capa,#wsp_capabilities{}).
+
+encode_capabilities(Cap,Def) ->
+ Known =
+ [encode_capability(?WSP_CAP_ALIASES,
+ Cap#wsp_capabilities.aliases,
+ Def#wsp_capabilities.aliases),
+ encode_capability(?WSP_CAP_CLIENT_SDU_SIZE,
+ Cap#wsp_capabilities.client_sdu_size,
+ Def#wsp_capabilities.client_sdu_size),
+ encode_capability(?WSP_CAP_SERVER_SDU_SIZE,
+ Cap#wsp_capabilities.server_sdu_size,
+ Def#wsp_capabilities.server_sdu_size),
+ encode_capability(?WSP_CAP_PROTOCOL_OPTIONS,
+ Cap#wsp_capabilities.protocol_options,
+ Def#wsp_capabilities.protocol_options),
+ encode_capability(?WSP_CAP_METHOD_MOR,
+ Cap#wsp_capabilities.method_mor,
+ Def#wsp_capabilities.method_mor),
+ encode_capability(?WSP_CAP_PUSH_MOR,
+ Cap#wsp_capabilities.push_mor,
+ Def#wsp_capabilities.push_mor),
+ encode_capability(?WSP_CAP_EXTENDED_METHODS,
+ Cap#wsp_capabilities.extended_methods,
+ Def#wsp_capabilities.extended_methods),
+ encode_capability(?WSP_CAP_HEADER_CODE_PAGES,
+ Cap#wsp_capabilities.header_code_pages,
+ Def#wsp_capabilities.header_code_pages),
+ encode_capability(?WSP_CAP_CLIENT_MESSAGE_SIZE,
+ Cap#wsp_capabilities.client_message_size,
+ Def#wsp_capabilities.client_message_size),
+ encode_capability(?WSP_CAP_SERVER_MESSAGE_SIZE,
+ Cap#wsp_capabilities.server_message_size,
+ Def#wsp_capabilities.server_message_size)],
+ Unknown =
+ lists:map(fun({Id, Data}) when integer(Id) ->
+ <<1:1, Id:7, Data/binary>>;
+ ({Id,Data}) ->
+ <<(encode_text_string(Id))/binary, Data/binary>>
+ end, Cap#wsp_capabilities.unknown),
+ list_to_binary(
+ lists:map(fun(<<>>) -> [];
+ (Bin) ->
+ [e_uintvar(size(Bin)), Bin]
+ end, Known ++ Unknown)).
+
+
+
+
+encode_capability(_Capa, Default, Default) ->
+ <<>>;
+encode_capability(Capa, Value, _) ->
+ case Capa of
+ ?WSP_CAP_ALIASES ->
+ <<1:1, ?WSP_CAP_ALIASES:7, (encode_addresses(Value))/binary>>;
+
+ ?WSP_CAP_CLIENT_SDU_SIZE ->
+ <<1:1, ?WSP_CAP_CLIENT_SDU_SIZE:7, (e_uintvar(Value))/binary>>;
+
+ ?WSP_CAP_SERVER_SDU_SIZE ->
+ <<1:1, ?WSP_CAP_SERVER_SDU_SIZE:7, (e_uintvar(Value))/binary>>;
+
+ ?WSP_CAP_PROTOCOL_OPTIONS ->
+ Opts = case lists:member(confirmed_push, Value) of
+ true -> 16#80;
+ false -> 0
+ end bor
+ case lists:member(push, Value) of
+ true -> 16#40;
+ false -> 0
+ end bor
+ case lists:member(resume, Value) of
+ true -> 16#20;
+ false -> 0
+ end bor
+ case lists:member(acknowledgement_headers, Value) of
+ true -> 16#10;
+ false -> 0
+ end,
+ %% FIXME: symbolic encode/decode of options
+ <<1:1, ?WSP_CAP_PROTOCOL_OPTIONS:7, Opts>>;
+
+ ?WSP_CAP_METHOD_MOR ->
+ <<1:1, ?WSP_CAP_METHOD_MOR:7, (e_uintvar(Value))/binary>>;
+
+ ?WSP_CAP_PUSH_MOR ->
+ <<1:1, ?WSP_CAP_PUSH_MOR:7, (e_uintvar(Value))/binary>>;
+
+ ?WSP_CAP_EXTENDED_METHODS ->
+ <<1:1, ?WSP_CAP_EXTENDED_METHODS:7,
+ (encode_extended_methods(Value))/binary>>;
+
+ ?WSP_CAP_HEADER_CODE_PAGES ->
+ Data = list_to_binary(
+ lists:map(fun(Page) when integer(Page) -> Page;
+ ({Page,Name}) ->
+ [Page, encode_text_string(Name)]
+ end, Value)),
+ <<1:1, ?WSP_CAP_HEADER_CODE_PAGES:7, Data/binary>>;
+
+ ?WSP_CAP_CLIENT_MESSAGE_SIZE ->
+ <<1:1, ?WSP_CAP_CLIENT_MESSAGE_SIZE:7,
+ (e_uintvar(Value))/binary>>;
+
+ ?WSP_CAP_SERVER_MESSAGE_SIZE ->
+ <<1:1, ?WSP_CAP_SERVER_MESSAGE_SIZE:7,
+ (e_uintvar(Value))/binary>>;
+ _ when integer(Capa) ->
+ <<1:1, Capa:7, Value/binary>>;
+ _ when list(Capa) ->
+ <<(encode_text_string(Capa))/binary, Value/binary>>
+ end.
+
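+%% A minimal usage sketch (illustrative only; the values are arbitrary):
+%% the protocol-options capability is a bit mask, so requesting push (16#40)
+%% and resume (16#20) yields the single octet 16#60. The empty list passed as
+%% the default only forces the non-default clause above.
+protocol_options_capability_example() ->
+ <<1:1, ?WSP_CAP_PROTOCOL_OPTIONS:7, Opts>> =
+ encode_capability(?WSP_CAP_PROTOCOL_OPTIONS, [push, resume], []),
+ 16#60 = Opts,
+ ok.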
+
+decode_capabilities(<<>>, WspCaps) ->
+ WspCaps;
+decode_capabilities(D0,WspCaps) ->
+ {Len, D1} = d_uintvar(D0),
+ <<Capa:Len/binary, D2/binary>> = D1,
+ WspCaps1 =
+ case Capa of
+ <<1:1, Id:7, Data/binary>> ->
+ decode_capa(Id, Data, WspCaps);
+ _ ->
+ {Id,Data} = d_text_string(Capa),
+ decode_capa(Id, Data, WspCaps)
+ end,
+ decode_capabilities(D2, WspCaps1).
+
+
+
+decode_capa(Id,Data, WspCaps) ->
+ case Id of
+ ?WSP_CAP_SERVER_SDU_SIZE ->
+ {Val,_} = d_uintvar(Data),
+ WspCaps#wsp_capabilities{server_sdu_size=Val};
+
+ ?WSP_CAP_CLIENT_SDU_SIZE ->
+ {Val,_} = d_uintvar(Data),
+ WspCaps#wsp_capabilities{client_sdu_size=Val};
+
+ ?WSP_CAP_PROTOCOL_OPTIONS ->
+ <<POP,_/binary>> = Data,
+ Opts =
+ if POP band 16#80 == 16#80 -> [confirmed_push];
+ true -> []
+ end ++
+ if POP band 16#40 == 16#40 -> [push];
+ true -> []
+ end ++
+ if POP band 16#20 == 16#20 -> [resume];
+ true -> []
+ end ++
+ if POP band 16#10 == 16#10 -> [acknowledgement_headers];
+ true -> []
+ end,
+ WspCaps#wsp_capabilities{protocol_options=Opts};
+
+ ?WSP_CAP_METHOD_MOR ->
+ {Val,_} = d_uintvar(Data),
+ WspCaps#wsp_capabilities{method_mor=Val};
+
+ ?WSP_CAP_PUSH_MOR ->
+ {Val,_} = d_uintvar(Data),
+ WspCaps#wsp_capabilities{push_mor=Val};
+
+ ?WSP_CAP_EXTENDED_METHODS ->
+ Extended = decode_extended_methods(Data),
+ WspCaps#wsp_capabilities { extended_methods = Extended };
+
+ ?WSP_CAP_HEADER_CODE_PAGES ->
+ %% Client sends [Code(uint8) Name(text-string)]*
+ %% Server sends [Code(uint8)]*

+ io:format("FIXME: Header Code Pages = ~p\n",[Data]),
+ WspCaps;
+
+ ?WSP_CAP_ALIASES ->
+ Aliases = decode_addresses(Data),
+ WspCaps#wsp_capabilities { aliases = Aliases };
+
+ ?WSP_CAP_CLIENT_MESSAGE_SIZE ->
+ {Val,_} = d_uintvar(Data),
+ WspCaps#wsp_capabilities{client_message_size=Val};
+
+ ?WSP_CAP_SERVER_MESSAGE_SIZE ->
+ {Val,_} = d_uintvar(Data),
+ WspCaps#wsp_capabilities{server_message_size=Val};
+ _ ->
+ Unknown = [{Id, Data} | WspCaps#wsp_capabilities.unknown],
+ io:format("WARNING: ignoring unknown capability ~p\n",
+ [Unknown]),
+ WspCaps#wsp_capabilities{unknown = Unknown}
+ end.
+
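+%% A minimal round-trip sketch (illustrative only): each capability entry on
+%% the wire is uintvar(length) followed by the encoded capability, so an
+%% encoded method-MOR of 10 decodes back into the #wsp_capabilities record
+%% (assuming e_uintvar/1 and d_uintvar/1 are inverses, as used above).
+capability_roundtrip_example() ->
+ Entry = encode_capability(?WSP_CAP_METHOD_MOR, 10, undefined),
+ Bin = <<(e_uintvar(size(Entry)))/binary, Entry/binary>>,
+ Caps = decode_capabilities(Bin, #wsp_capabilities{}),
+ 10 = Caps#wsp_capabilities.method_mor,
+ ok.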
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Headers = [ Header ]
+%% Header = {FieldName, FieldValue}
+%% FieldName = atom()
+%% FieldValue = {Value, Params}
+%% | Value
+%%
+%% Params = [{Param,Value} | Param]
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-define(WH(Name,Value,Params),
+ #wsp_header { name = (Name), value = (Value), params = Params}).
+
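+%% A minimal sketch of the shape described above, built with the ?WH macro
+%% (values are arbitrary; the exact parameter keys are assumptions):
+example_header_list() ->
+ [?WH('Host', "example.com", []),
+ ?WH('Content-Length', 1024, []),
+ ?WH('Content-Type', "text/plain", [{charset, "utf-8"}])].
+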
+encode_headers(Headers) ->
+ encode_headers(Headers, ?WSP_DEFAULT_VERSION).
+
+encode_headers(Headers, Version) ->
+ encode_headers(Headers, Version, []).
+
+encode_headers([H|T], Version, Acc) ->
+ encode_headers(T, Version, [encode_header(H, Version)|Acc]);
+encode_headers([], _, Acc) ->
+ list_to_binary(lists:reverse(Acc)).
+
+
+decode_headers(Bin) ->
+ decode_headers(Bin, ?WSP_DEFAULT_VERSION).
+
+decode_headers(<<>>, _Version) ->
+ [];
+decode_headers(Data, Version) ->
+ decode_headers(Data, [], Version, ?WSP_DEFAULT_CODEPAGE).
+
+
+decode_headers(<<1:1,Code:7,Data/binary>>,Acc,Version,CP) ->
+ FieldName = lookup_field_name(Code),
+ {FieldData,Data1} = scan_header_data(Data),
+ H = decode_header(FieldName, FieldData,Version,CP),
+ ?dbg("header: ~p, field data=~p, header=~p\n",
+ [FieldName, FieldData, H]),
+ if H#wsp_header.name == 'Encoding-Version' ->
+ Version1 = H#wsp_header.value,
+ ?dbg("Version switch from ~w to ~w\n", [Version, Version1]),
+ decode_headers(Data1,[H|Acc],Version1, CP);
+ true ->
+ decode_headers(Data1,[H|Acc],Version, CP)
+ end;
+decode_headers(Data = <<Code,_/binary>>,Acc,Version,CP)
+ when Code >= 32, Code < 127->
+ {TmpField,Data1} = d_text_string(Data),
+ FieldName = normalise_field_name(TmpField),
+ {FieldData,Data2} = scan_header_data(Data1),
+ H = decode_header(FieldName,FieldData,Version,CP),
+ ?dbg("header: ~p, field data=~p, header=~p\n",
+ [FieldName, FieldData, H]),
+ if H#wsp_header.name == 'Encoding-Version' ->
+ Version1 = H#wsp_header.value,
+ ?dbg("Version switch from ~w to ~w\n", [Version, Version1]),
+ decode_headers(Data2,[H|Acc],Version1, CP);
+ true ->
+ decode_headers(Data2,[H|Acc],Version, CP)
+ end;
+decode_headers(<<CP1,Data/binary>>,Acc,Version,_CP) when CP1 >= 1, CP1 =< 31 ->
+ ?dbg("decode_headers: codpage changed form ~w -> ~w\n",[_CP,CP1]),
+ decode_headers(Data,Acc,Version,CP1);
+decode_headers(<<16#7f,CP1,Data/binary>>,Acc,Version,_CP) ->
+ ?dbg("decode_headers: codpage changed form ~w -> ~w\n",[_CP,CP1]),
+ decode_headers(Data,Acc,Version,CP1);
+
+decode_headers(<<>>, Acc, _Version, _CP) ->
+ lists:reverse(Acc).
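+
+%% A small sketch of the code-page shift handling above (the page number 2 is
+%% arbitrary and no real headers follow, so both calls decode to []):
+codepage_shift_example() ->
+ CP = ?WSP_DEFAULT_CODEPAGE,
+ [] = decode_headers(<<2>>, [], ?WSP_DEFAULT_VERSION, CP),
+ [] = decode_headers(<<16#7f, 2>>, [], ?WSP_DEFAULT_VERSION, CP),
+ ok.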
+
+%%
+%% Retrieve the header data
+%% (this makes it possible to skip unknown encoding)
+%%
+scan_header_data(Data = <<N,Data0/binary>>) ->
+ if N >= 0, N =< 30 ->
+ <<Value:N/binary, Data1/binary>> = Data0,
+ {{short,Value}, Data1};
+ N == 31 ->
+ {N1, Data1} = d_uintvar(Data0),
+ <<Value:N1/binary, Data2/binary>> = Data1,
+ {{long,Value}, Data2};
+ N >= 32, N =< 127 ->
+ d_text_string(Data);
+ true ->
+ { N band 16#7f, Data0}
+ end.
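+
+%% A couple of concrete cases for the octet ranges handled above (the byte
+%% values are made up): 0..30 is an inline length, 128..255 is a well-known
+%% code with the top bit stripped.
+scan_header_data_examples() ->
+ {{short, <<"abc">>}, <<255>>} = scan_header_data(<<3, "abc", 255>>),
+ {5, <<255>>} = scan_header_data(<<16#85, 255>>),
+ ok.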
+
+%%
+%% Decode header: return #wsp_header
+%%
+decode_header(Field, Value) ->
+ decode_header(Field, Value,
+ ?WSP_DEFAULT_VERSION,
+ ?WSP_DEFAULT_CODEPAGE).
+
+decode_header(Field, Value, Version, 1) ->
+ case Field of
+ 'Accept' ->
+ decode_accept(Value, Version);
+
+ 'Accept-Charset' when Version >= ?WSP_13 ->
+ decode_accept_charset(Value, Version);
+ 'Accept-Charset' ->
+ decode_accept_charset(Value, Version);
+
+ 'Accept-Encoding' when Version >= ?WSP_13 ->
+ decode_accept_encoding(Value, Version);
+ 'Accept-Encoding' ->
+ decode_accept_encoding(Value, Version);
+
+ 'Accept-Language' ->
+ decode_accept_language(Value, Version);
+ 'Accept-Ranges' ->
+ decode_accept_ranges(Value, Version);
+ 'Age' ->
+ decode_age(Value,Version);
+ 'Allow' ->
+ decode_allow(Value,Version);
+ 'Authorization' ->
+ decode_authorization(Value,Version);
+
+ 'Cache-Control' when Version >= ?WSP_14 ->
+ decode_cache_control(Value,Version);
+ 'Cache-Control' when Version >= ?WSP_13 ->
+ decode_cache_control(Value,Version);
+ 'Cache-Control' ->
+ decode_cache_control(Value,Version);
+
+ 'Connection' ->
+ decode_connection(Value,Version);
+ 'Content-Base' ->
+ decode_content_base(Value,Version);
+ 'Content-Encoding' ->
+ decode_content_encoding(Value,Version);
+ 'Content-Language' ->
+ decode_content_language(Value,Version);
+ 'Content-Length' ->
+ decode_content_length(Value,Version);
+ 'Content-Location' ->
+ decode_content_location(Value,Version);
+ 'Content-Md5' ->
+ decode_content_md5(Value,Version);
+
+ 'Content-Range' when Version >= ?WSP_13 ->
+ decode_content_range(Value,Version);
+ 'Content-Range' ->
+ decode_content_range(Value,Version);
+
+ 'Content-Type' ->
+ decode_content_type(Value,Version);
+ 'Date' ->
+ decode_date(Value, Version);
+ 'Etag' ->
+ decode_etag(Value,Version);
+ 'Expires' ->
+ decode_expires(Value,Version);
+ 'From' ->
+ decode_from(Value,Version);
+ 'Host' ->
+ decode_host(Value,Version);
+ 'If-Modified-Since' ->
+ decode_if_modified_since(Value,Version);
+ 'If-Match' ->
+ decode_if_match(Value,Version);
+ 'If-None-Match' ->
+ decode_if_none_match(Value,Version);
+ 'If-Range' ->
+ decode_if_range(Value,Version);
+ 'If-Unmodified-Since' ->
+ decode_if_unmodified_since(Value,Version);
+ 'Location' ->
+ decode_location(Value,Version);
+ 'Last-Modified' ->
+ decode_last_modified(Value,Version);
+ 'Max-Forwards' ->
+ decode_max_forwards(Value,Version);
+ 'Pragma' ->
+ decode_pragma(Value,Version);
+ 'Proxy-Authenticate' ->
+ decode_proxy_authenticate(Value,Version);
+ 'Proxy-Authorization' ->
+ decode_proxy_authorization(Value,Version);
+ 'Public' ->
+ decode_public(Value,Version);
+ 'Range' ->
+ decode_range(Value,Version);
+ 'Referer' ->
+ decode_referer(Value,Version);
+ 'Retry-After' ->
+ decode_retry_after(Value,Version);
+ 'Server' ->
+ decode_server(Value,Version);
+ 'Transfer-Encoding' ->
+ decode_transfer_encoding(Value,Version);
+ 'Upgrade' ->
+ decode_upgrade(Value,Version);
+ 'User-Agent' ->
+ decode_user_agent(Value,Version);
+ 'Vary' ->
+ decode_vary(Value,Version);
+ 'Via' ->
+ decode_via(Value,Version);
+ 'Warning' ->
+ decode_warning(Value,Version);
+ 'Www-Authenticate' ->
+ decode_www_authenticate(Value,Version);
+
+ 'Content-Disposition' when Version >= ?WSP_14 ->
+ decode_content_disposition(Value,Version);
+ 'Content-Disposition' ->
+ decode_content_disposition(Value,Version);
+
+ 'X-Wap-Application-Id' when Version >= ?WSP_12 ->
+ decode_x_wap_application_id(Value,Version);
+
+ 'X-Wap-Content-Uri' when Version >= ?WSP_12 ->
+ decode_x_wap_content_uri(Value,Version);
+
+ 'X-Wap-Initiator-Uri' when Version >= ?WSP_12 ->
+ decode_x_wap_initiator_uri(Value,Version);
+
+ 'Accept-Application' when Version >= ?WSP_12 ->
+ decode_accept_application(Value,Version);
+
+ 'Bearer-Indication' when Version >= ?WSP_12 ->
+ decode_bearer_indication(Value,Version);
+
+ 'Push-Flag' when Version >= ?WSP_12 ->
+ decode_push_flag(Value,Version);
+
+ 'Profile' when Version >= ?WSP_12 ->
+ decode_profile(Value,Version);
+
+ 'Profile-Diff' when Version >= ?WSP_12 ->
+ decode_profile_diff(Value,Version);
+
+ 'Profile-Warning' when Version >= ?WSP_12 ->
+ decode_profile_warning(Value,Version);
+
+ 'Expect' when Version >= ?WSP_15 ->
+ decode_expect(Value,Version);
+ 'Expect' when Version >= ?WSP_13 ->
+ decode_expect(Value,Version);
+
+ 'Te' when Version >= ?WSP_13 ->
+ decode_te(Value,Version);
+ 'Trailer' when Version >= ?WSP_13 ->
+ decode_trailer(Value,Version);
+
+ 'X-Wap-Tod' when Version >= ?WSP_13 ->
+ decode_x_wap_tod(Value,Version);
+ 'X-Wap.tod' when Version >= ?WSP_13 ->
+ decode_x_wap_tod(Value,Version);
+
+ 'Content-Id' when Version >= ?WSP_13 ->
+ decode_content_id(Value,Version);
+ 'Set-Cookie' when Version >= ?WSP_13 ->
+ decode_set_cookie(Value,Version);
+ 'Cookie' when Version >= ?WSP_13 ->
+ decode_cookie(Value,Version);
+
+ 'Encoding-Version' when Version >= ?WSP_13 ->
+ decode_encoding_version(Value,Version);
+ 'Profile-Warning' when Version >= ?WSP_14 ->
+ decode_profile_warning(Value,Version);
+
+ 'X-Wap-Security' when Version >= ?WSP_14 ->
+ decode_x_wap_security(Value,Version);
+ 'X-Wap-Loc-Invocation' when Version >= ?WSP_15 ->
+ decode_x_wap_loc_invocation(Value,Version); %% ???
+ 'X-Wap-Loc-Delivery' when Version >= ?WSP_15 ->
+ decode_x_wap_loc_delivery(Value,Version); %% ???
+ _ ->
+ ?dbg("Warning: none standard field ~p in version ~p codepage=1\n",
+ [Field, Version]),
+ ?WH(Field, Value, [])
+ end;
+decode_header(Field, Value, _Version, _CP) ->
+ ?dbg("Warning: none standard field ~p in version ~p codepage=~w\n",
+ [Field, _Version, _CP]),
+ ?WH(Field, Value, []).
+
+%%
+%% Encode field and value according to version
+%% FIXME: split multiple header values (i.e. Via) into multiple
+%% headers
+%%
+encode_header(H, Version) ->
+ case H#wsp_header.name of
+ 'Accept' ->
+ [16#80, encode_accept(H, Version)];
+ 'Accept-Charset' when Version >= ?WSP_13 ->
+ [16#bb, encode_accept_charset(H, Version)];
+ 'Accept-Charset' ->
+ [16#81, encode_accept_charset(H, Version)];
+ 'Accept-Encoding' when Version >= ?WSP_13 ->
+ [16#bc, encode_accept_encoding(H, Version)];
+ 'Accept-Encoding' ->
+ [16#82, encode_accept_encoding(H, Version)];
+ 'Accept-Language' ->
+ [16#83, encode_accept_language(H, Version)];
+ 'Accept-Ranges' ->
+ [16#84, encode_accept_ranges(H, Version)];
+ 'Accept-Application' when Version >= ?WSP_12 ->
+ [16#b2, encode_accept_application(H,Version)];
+ 'Age' ->
+ [16#85, encode_age(H, Version)];
+ 'Allow' ->
+ [16#86, encode_allow(H, Version)];
+ 'Authorization' ->
+ [16#87, encode_authorization(H, Version)];
+ 'Cache-Control' when Version >= ?WSP_14 ->
+ [16#c7, encode_cache_control(H, Version)];
+ 'Cache-Control' when Version >= ?WSP_13 ->
+ [16#bd, encode_cache_control(H, Version)];
+ 'Cache-Control' ->
+ [16#88, encode_cache_control(H, Version)];
+ 'Connection' ->
+ [16#89, encode_connection(H, Version)];
+ 'Content-Base' ->
+ [16#8a, encode_content_base(H, Version)];
+ 'Content-Encoding' ->
+ [16#8b, encode_content_encoding(H, Version)];
+
+ 'Content-Language' ->
+ [16#8c, encode_content_language(H,Version)];
+ 'Content-Length' ->
+ [16#8d, encode_content_length(H,Version)];
+ 'Content-Location' ->
+ [16#8e, encode_content_location(H,Version)];
+ 'Content-Md5' ->
+ [16#8f, encode_content_md5(H,Version)];
+ 'Content-Range' when Version >= ?WSP_13 ->
+ [16#be, encode_content_range(H,Version)];
+ 'Content-Range' ->
+ [16#90, encode_content_range(H,Version)];
+ 'Content-Type' ->
+ [16#91, encode_content_type(H,Version)];
+ 'Date' ->
+ [16#92, encode_date(H,Version)];
+ 'Etag' ->
+ [16#93, encode_etag(H,Version)];
+ 'Expires' ->
+ [16#94, encode_expires(H,Version)];
+ 'From' ->
+ [16#95, encode_from(H,Version)];
+ 'Host' ->
+ [16#96, encode_host(H,Version)];
+ 'If-Modified-Since' ->
+ [16#97, encode_if_modified_since(H,Version)];
+ 'If-Match' ->
+ [16#98, encode_if_match(H,Version)];
+ 'If-None-Match' ->
+ [16#99, encode_if_none_match(H,Version)];
+ 'If-Range' ->
+ [16#9a, encode_if_range(H,Version)];
+ 'If-Unmodified-Since' ->
+ [16#9b, encode_if_unmodified_since(H,Version)];
+ 'Location' ->
+ [16#9c, encode_location(H,Version)];
+ 'Last-Modified' ->
+ [16#9d, encode_last_modified(H,Version)];
+ 'Max-Forwards' ->
+ [16#9e, encode_max_forwards(H,Version)];
+ 'Pragma' ->
+ [16#9f, encode_pragma(H,Version)];
+ 'Proxy-Authenticate' ->
+ [16#a0, encode_proxy_authenticate(H,Version)];
+ 'Proxy-Authorization' ->
+ [16#a1, encode_proxy_authorization(H,Version)];
+ 'Public' ->
+ [16#a2, encode_public(H,Version)];
+ 'Range' ->
+ [16#a3, encode_range(H,Version)];
+ 'Referer' ->
+ [16#a4, encode_referer(H,Version)];
+ 'Retry-After' ->
+ [16#a5, encode_retry_after(H,Version)];
+ 'Server' ->
+ [16#a6, encode_server(H,Version)];
+ 'Transfer-Encoding' ->
+ [16#a7, encode_transfer_encoding(H,Version)];
+ 'Upgrade' ->
+ [16#a8, encode_upgrade(H,Version)];
+ 'User-Agent' ->
+ [16#a9, encode_user_agent(H,Version)];
+ 'Vary' ->
+ [16#aa, encode_vary(H,Version)];
+ 'Via' ->
+ [16#ab, encode_via(H,Version)];
+ 'Warning' ->
+ [16#ac, encode_warning(H,Version)];
+ 'Www-Authenticate' ->
+ [16#ad, encode_www_authenticate(H,Version)];
+
+ 'Content-Disposition' when Version >= ?WSP_14 ->
+ [16#c5, encode_content_disposition(H,Version)];
+ 'Content-Disposition' ->
+ [16#ae, encode_content_disposition(H,Version)];
+
+
+ 'X-Wap-Application-Id' when Version >= ?WSP_12 ->
+ [16#af, encode_x_wap_application_id(H,Version)];
+ 'X-Wap-Content-Uri' when Version >= ?WSP_12 ->
+ [16#b0, encode_x_wap_content_uri(H,Version)];
+ 'X-Wap-Initiator-Uri' when Version >= ?WSP_12 ->
+ [16#b1, encode_x_wap_initiator_uri(H,Version)];
+
+ 'Bearer-Indication' when Version >= ?WSP_12 ->
+ [16#b3, encode_bearer_indication(H,Version)];
+ 'Push-Flag' when Version >= ?WSP_12 ->
+ [16#b4, encode_push_flag(H,Version)];
+
+ 'Profile' when Version >= ?WSP_12 ->
+ [16#b5, encode_profile(H,Version)];
+ 'Profile-Diff' when Version >= ?WSP_12 ->
+ [16#b6, encode_profile_diff(H,Version)];
+ 'Profile-Warning' when Version >= ?WSP_14 ->
+ [16#c4, encode_profile_warning(H,Version)];
+ 'Profile-Warning' when Version >= ?WSP_12 ->
+ [16#b7, encode_profile_warning(H,Version)];
+
+ 'Expect' when Version >= ?WSP_15 ->
+ [16#c8, encode_expect(H,Version)];
+ 'Expect' when Version >= ?WSP_13 ->
+ [16#b8, encode_expect(H,Version)];
+ 'Te' when Version >= ?WSP_13 ->
+ [16#b9, encode_te(H,Version)];
+ 'Trailer' when Version >= ?WSP_13 ->
+ [16#ba, encode_trailer(H,Version)];
+ 'X-Wap-Tod' when Version >= ?WSP_13 ->
+ [16#bf, encode_x_wap_tod(H,Version)];
+ 'Content-Id' when Version >= ?WSP_13 ->
+ [16#c0, encode_content_id(H,Version)];
+ 'Set-Cookie' when Version >= ?WSP_13 ->
+ [16#c1, encode_set_cookie(H,Version)];
+ 'Cookie' when Version >= ?WSP_13 ->
+ [16#c2, encode_cookie(H,Version)];
+ 'Encoding-Version' when Version >= ?WSP_13 ->
+ [16#c3, encode_encoding_version(H,Version)];
+ 'Encoding-Version' when Version < ?WSP_13 ->
+ [encode_text_string("Encoding-Version"),
+ encode_text_string(lists:flatten(format_version(H#wsp_header.value)))];
+
+ 'X-Wap-Security' when Version >= ?WSP_14 ->
+ [16#c6, encode_x_wap_security(H,Version)];
+ 'X-Wap-Loc-Invocation' when Version >= ?WSP_15 ->
+ [16#c9, encode_x_wap_loc_invocation(H,Version)];
+ 'X-Wap-Loc-Delivery' when Version >= ?WSP_15 ->
+ [16#ca, encode_x_wap_loc_delivery(H,Version)];
+ Field when atom(Field) ->
+ [encode_text_string(atom_to_list(Field)),
+ encode_text_string(H#wsp_header.value)];
+ Field when list(Field) ->
+ [encode_text_string(Field),
+ encode_text_string(H#wsp_header.value)]
+ end.
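+
+%% A minimal sketch of the result shape: for a well-known field the encoding
+%% is [Code | EncodedValue], e.g. 'Host' is assigned code 16#96 above
+%% (the host name is arbitrary).
+encode_header_example(Version) ->
+ [16#96 | _] = encode_header(?WH('Host', "example.com", []), Version),
+ ok.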
+
+%%
+%% Convert HTTP headers into WSP headers
+%%
+parse_headers([H | Hs]) ->
+ parse_header(H, Hs);
+parse_headers([]) ->
+ [].
+
+parse_header(H) ->
+ parse_header(H, []).
+
+parse_header({FieldName,FieldValue}, Hs) ->
+ case single_comma_field(FieldName) of
+ true ->
+ io:format("parse: ~s: ~s\n", [FieldName, FieldValue]),
+ H = parse_hdr(FieldName,FieldValue),
+ io:format("header: ~p\n", [H]),
+ [H | parse_headers(Hs)];
+ false ->
+ Values = string:tokens(FieldValue, ","),
+ parse_header(FieldName, Values, Hs)
+ end.
+
+parse_header(FieldName, [Value|Vs], Hs) ->
+ io:format("parse: ~s: ~s\n", [FieldName, Value]),
+ H = parse_hdr(FieldName, Value),
+ io:format("header: ~p\n", [H]),
+ [H | parse_header(FieldName, Vs, Hs)];
+parse_header(_FieldName, [], Hs) ->
+ parse_headers(Hs).
+
+
+single_comma_field(Field) ->
+ case Field of
+ 'Set-Cookie' -> true; %% FIXME (Is multiple!)
+ 'Date' -> true;
+ 'Expires' -> true;
+ 'If-Modified-Since' -> true;
+ 'If-Range' -> true;
+ 'If-Unmodified-Since' -> true;
+ 'Last-Modified' -> true;
+ 'Retry-After' -> true;
+ 'X-Wap-Tod' -> true;
+ _ -> false
+ end.
+
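+%% A small sketch of the comma splitting above: 'Accept-Language' is not a
+%% single-comma field, so one HTTP header becomes two WSP headers (assuming
+%% trim/1 strips the surrounding whitespace).
+split_header_example() ->
+ [#wsp_header{name = 'Accept-Language', value = "en"},
+ #wsp_header{name = 'Accept-Language', value = "sv"}] =
+ parse_header({'Accept-Language', "en, sv"}, []),
+ ok.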
+
+parse_hdr(Field, Value0) ->
+ Value = trim(Value0),
+ case Field of
+ 'Accept' -> parse_accept(Value);
+ 'Accept-Charset' -> parse_accept_charset(Value);
+ 'Accept-Encoding' -> parse_accept_encoding(Value);
+ 'Accept-Language' -> parse_accept_language(Value);
+ 'Accept-Ranges' -> parse_accept_ranges(Value);
+ 'Age' -> parse_age(Value);
+ 'Allow' -> parse_allow(Value);
+ 'Authorization' -> parse_authorization(Value);
+ 'Cache-Control' -> parse_cache_control(Value);
+ 'Connection' -> parse_connection(Value);
+ 'Content-Base' -> parse_content_base(Value);
+ 'Content-Encoding' -> parse_content_encoding(Value);
+ 'Content-Language' -> parse_content_language(Value);
+ 'Content-Length' -> parse_content_length(Value);
+ 'Content-Location' -> parse_content_location(Value);
+ 'Content-Md5' -> parse_content_md5(Value);
+ 'Content-Range' -> parse_content_range(Value);
+ 'Content-Type' -> parse_content_type(Value);
+ 'Date' -> parse_date(Value);
+ 'Etag' -> parse_etag(Value);
+ 'Expires' -> parse_expires(Value);
+ 'From' -> parse_from(Value);
+ 'Host' -> parse_host(Value);
+ 'If-Modified-Since' -> parse_if_modified_since(Value);
+ 'If-Match' -> parse_if_match(Value);
+ 'If-None-Match' -> parse_if_none_match(Value);
+ 'If-Range' -> parse_if_range(Value);
+ 'If-Unmodified-Since' -> parse_if_unmodified_since(Value);
+ 'Location' -> parse_location(Value);
+ 'Last-Modified' -> parse_last_modified(Value);
+ 'Max-Forwards' -> parse_max_forwards(Value);
+ 'Pragma' -> parse_pragma(Value);
+ 'Proxy-Authenticate' -> parse_proxy_authenticate(Value);
+ 'Proxy-Authorization' -> parse_proxy_authorization(Value);
+ 'Public' -> parse_public(Value);
+ 'Range' -> parse_range(Value);
+ 'Referer' -> parse_referer(Value);
+ 'Retry-After' -> parse_retry_after(Value);
+ 'Server' -> parse_server(Value);
+ 'Transfer-Encoding' -> parse_transfer_encoding(Value);
+ 'Upgrade' -> parse_upgrade(Value);
+ 'User-Agent' -> parse_user_agent(Value);
+ 'Vary' -> parse_vary(Value);
+ 'Via' -> parse_via(Value);
+ 'Warning' -> parse_warning(Value);
+ 'Www-Authenticate' -> parse_www_authenticate(Value);
+ 'Content-Disposition' -> parse_content_disposition(Value);
+ 'X-Wap-Application-Id' -> parse_x_wap_application_id(Value);
+ 'X-Wap-Content-Uri' -> parse_x_wap_content_uri(Value);
+ 'X-Wap-Initiator-Uri' -> parse_x_wap_initiator_uri(Value);
+ 'Accept-Application' -> parse_accept_application(Value);
+ 'Bearer-Indication' -> parse_bearer_indication(Value);
+ 'Push-Flag' -> parse_push_flag(Value);
+ 'Profile' -> parse_profile(Value);
+ 'Profile-Diff' -> parse_profile_diff(Value);
+ 'Profile-Warning' -> parse_profile_warning(Value);
+ 'Expect' -> parse_expect(Value);
+ 'Te' -> parse_te(Value);
+ 'Trailer' -> parse_trailer(Value);
+ 'X-Wap-Tod' -> parse_x_wap_tod(Value);
+ 'Content-Id' -> parse_content_id(Value);
+ 'Set-Cookie' -> parse_set_cookie(Value);
+ 'Cookie' -> parse_cookie(Value);
+ 'Encoding-Version' -> parse_encoding_version(Value);
+ 'X-Wap-Security' -> parse_x_wap_security(Value);
+ 'X-Wap-Loc-Invocation' -> parse_x_wap_loc_invocation(Value);
+ 'X-Wap-Loc-Delivery' -> parse_x_wap_loc_delivery(Value);
+ _ ->
+ ?dbg("Warning: header field ~p not recognissed\n",[Field]),
+ #wsp_header { name = Field, value = Value}
+ end.
+
+%%
+%% Format headers, will combine multiple headers into one
+%% FIXME: if length is < MAX_HTTP_HEADER_LENGTH
+%%
+format_headers(Hs) ->
+ format_hdrs(lists:keysort(#wsp_header.name,Hs), []).
+
+format_hdrs([H | Hs], Acc) ->
+ V1 = format_value(H),
+ format_hdrs(Hs, H#wsp_header.name, V1, Acc);
+format_hdrs([], Acc) ->
+ lists:reverse(Acc).
+
+format_hdrs([H|Hs], FieldName, FieldValue, Acc)
+ when FieldName == H#wsp_header.name ->
+ V1 = format_value(H),
+ format_hdrs(Hs, FieldName, [FieldValue,",",V1], Acc);
+format_hdrs(Hs, FieldName, FieldValue, Acc) ->
+ format_hdrs(Hs, [{FieldName, lists:flatten(FieldValue)} | Acc]).
+
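+%% A small sketch of the combining above: two headers with the same name come
+%% back as one {Name, Value} pair (the joined value would read "en,sv",
+%% assuming format_params([]) contributes nothing).
+combine_headers_example() ->
+ [{'Accept-Language', _Combined}] =
+ format_headers([?WH('Accept-Language', "en", []),
+ ?WH('Accept-Language', "sv", [])]),
+ ok.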
+
+%%
+%% Format header: #wsp_header => {FieldName, Value}
+%%
+
+format_header(H) ->
+ {H#wsp_header.name, format_value(H)}.
+
+format_value(H) ->
+ case H#wsp_header.name of
+ 'Accept' -> format_accept(H);
+ 'Accept-Charset' -> format_accept_charset(H);
+ 'Accept-Encoding' -> format_accept_encoding(H);
+ 'Accept-Language' -> format_accept_language(H);
+ 'Accept-Ranges' -> format_accept_ranges(H);
+ 'Age' -> format_age(H);
+ 'Allow' -> format_allow(H);
+ 'Authorization' -> format_authorization(H);
+ 'Cache-Control' -> format_cache_control(H);
+ 'Connection' -> format_connection(H);
+ 'Content-Base' -> format_content_base(H);
+ 'Content-Encoding' -> format_content_encoding(H);
+ 'Content-Language' -> format_content_language(H);
+ 'Content-Length' -> format_content_length(H);
+ 'Content-Location' -> format_content_location(H);
+ 'Content-Md5' -> format_content_md5(H);
+ 'Content-Range' -> format_content_range(H);
+ 'Content-Type' -> format_content_type(H);
+ 'Date' -> format_date(H);
+ 'Etag' -> format_etag(H);
+ 'Expires' -> format_expires(H);
+ 'From' -> format_from(H);
+ 'Host' -> format_host(H);
+ 'If-Modified-Since' -> format_if_modified_since(H);
+ 'If-Match' -> format_if_match(H);
+ 'If-None-Match' -> format_if_none_match(H);
+ 'If-Range' -> format_if_range(H);
+ 'If-Unmodified-Since' -> format_if_unmodified_since(H);
+ 'Location' -> format_location(H);
+ 'Last-Modified' -> format_last_modified(H);
+ 'Max-Forwards' -> format_max_forwards(H);
+ 'Pragma' -> format_pragma(H);
+ 'Proxy-Authenticate' -> format_proxy_authenticate(H);
+ 'Proxy-Authorization' -> format_proxy_authorization(H);
+ 'Public' -> format_public(H);
+ 'Range' -> format_range(H);
+ 'Referer' -> format_referer(H);
+ 'Retry-After' -> format_retry_after(H);
+ 'Server' -> format_server(H);
+ 'Transfer-Encoding' -> format_transfer_encoding(H);
+ 'Upgrade' -> format_upgrade(H);
+ 'User-Agent' -> format_user_agent(H);
+ 'Vary' -> format_vary(H);
+ 'Via' -> format_via(H);
+ 'Warning' -> format_warning(H);
+ 'Www-Authenticate' -> format_www_authenticate(H);
+ 'Content-Disposition' -> format_content_disposition(H);
+ 'X-Wap-Application-Id' -> format_x_wap_application_id(H);
+ 'X-Wap-Content-Uri' -> format_x_wap_content_uri(H);
+ 'X-Wap-Initiator-Uri' -> format_x_wap_initiator_uri(H);
+ 'Accept-Application' -> format_accept_application(H);
+ 'Bearer-Indication' -> format_bearer_indication(H);
+ 'Push-Flag' -> format_push_flag(H);
+ 'Profile' -> format_profile(H);
+ 'Profile-Diff' -> format_profile_diff(H);
+ 'Profile-Warning' -> format_profile_warning(H);
+ 'Expect' -> format_expect(H);
+ 'Te' -> format_te(H);
+ 'Trailer' -> format_trailer(H);
+ 'X-Wap-Tod' -> format_x_wap_tod(H);
+ 'Content-Id' -> format_content_id(H);
+ 'Set-Cookie' -> format_set_cookie(H);
+ 'Cookie' -> format_cookie(H);
+ 'Encoding-Version' -> format_encoding_version(H);
+ 'X-Wap-Security' -> format_x_wap_security(H);
+ 'X-Wap-Loc-Invocation' -> format_x_wap_loc_invocation(H);
+ 'X-Wap-Loc-Delivery' -> format_x_wap_loc_delivery(H);
+ _Field ->
+ ?dbg("Warning: header field ~s not recognissed\n",[_Field]),
+ to_list(H#wsp_header.value)
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Encode of field values
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Accept: <content-type> [q=<q-value>] [params]
+%% Type: Multiple
+%% Ref: 8.4.2.7
+%%
+%% Accept-value = Constrained-media | Accept-general-form
+%%
+%% Accept-general-form = Value-length Media-range [Accept-parameters]
+%% Media-range = (Well-known-media | Extension-media) *(Parameter)
+%% Accept-parameters = Q-token Q-value *(Accept-extension)
+%% Accept-extension = Parameter
+%% Constrained-media = Constrained-encoding
+%% Well-known-media = Integer-value
+%% Constrained-encoding = Short-Integer | Extension-media
+%% Q-token = <Octet 128>
+%%
+parse_accept(String) ->
+ %% FIXME
+ ?WH('Accept',String,[]).
+
+format_accept(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_accept(H, Version) ->
+ case encode_params(H#wsp_header.params,Version) of
+ <<>> ->
+ encode_well_known_media(H#wsp_header.value, Version);
+ Params ->
+ Media = encode_well_known_media(H#wsp_header.value, Version),
+ e_value(Media, Params)
+ end.
+
+decode_accept(Value, Version) when integer(Value) ->
+ %% Constrained-encoding: Short-Integer
+ ?WH('Accept',decode_well_known_media(Value, Version),[]);
+decode_accept(Value, Version) when list(Value) ->
+ ?WH('Accept',decode_well_known_media(Value,Version),[]);
+decode_accept({_,Data}, Version) ->
+ %% Accept-general-form
+ {Value,QData} = scan_header_data(Data),
+ Media_Range = decode_well_known_media(Value,Version),
+ Params = decode_params(QData, Version),
+ ?WH('Accept',Media_Range,Params).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Accept-Charset: <charset> | * [q=<q-value>]
+%% Type: Multiple
+%% Ref: 8.4.2.8
+%% Note that the definition of this one is a mess!!!!
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_accept_charset(String) ->
+ %% FIXME
+ ?WH('Accept-Charset',String,[]).
+
+format_accept_charset(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_accept_charset(H, _Version) ->
+ %% FIXME
+ encode_text_string(H#wsp_header.value).
+
+decode_accept_charset(0, _Version) ->
+ ?WH('Accept-Charset',"*",[]);
+decode_accept_charset(Value, _Version) when integer(Value) ->
+ ?WH('Accept-Charset', decode_charset(Value),[]);
+decode_accept_charset(Value, _Version) when list(Value) ->
+ ?WH('Accept-Charset',Value,[]);
+decode_accept_charset({short,Data}, _Version) ->
+ %% Guessing that the short form SHOULD be a multi-octet integer!!!
+ Value = d_long(Data),
+ ?WH('Accept-Charset', decode_charset(Value),[]);
+decode_accept_charset({long,Value}, _Version) ->
+ {Data1, QData} = scan_header_data(Value),
+ CharSet = case Data1 of
+ 0 ->
+ "*";
+ Value1 when integer(Value1) ->
+ decode_charset(Value1);
+ Value1 when list(Value1) ->
+ Value1;
+ {short,Value1} ->
+ Value2 = d_long(Value1),
+ decode_charset(Value2)
+ end,
+ Params = if QData == <<>> ->
+ [];
+ true ->
+ {QValue,_} = d_q_value(QData),
+ [{q, QValue}]
+ end,
+ ?WH('Accept-Charset',CharSet, Params).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Accept-Encoding: gzip | compress | deflate | * [q=<q-value>]
+%% Ref:
+%% Type: Multiple
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_accept_encoding(String) ->
+ ?WH('Accept-Encoding',String,[]).
+
+format_accept_encoding(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_accept_encoding(H, _Version) ->
+ %% FIXME general form
+ case H#wsp_header.value of
+ "gzip" -> ?ENCODE_SHORT(0);
+ "compress" -> ?ENCODE_SHORT(1);
+ "deflate" -> ?ENCODE_SHORT(2);
+ Value -> encode_text_string(Value)
+ end.
+
+decode_accept_encoding(0, _Version) ->
+ ?WH('Accept-Encoding',"gzip",[]);
+decode_accept_encoding(1, _Version) ->
+ ?WH('Accept-Encoding',"compress",[]);
+decode_accept_encoding(2, _Version) ->
+ ?WH('Accept-Encoding',"deflate",[]);
+decode_accept_encoding(Value, _Version) when list(Value) ->
+ ?WH('Accept-Encoding',Value,[]);
+decode_accept_encoding({_,Data}, _Version) when binary(Data) ->
+ {Enc, Data1} = scan_header_data(Data),
+ Params = if Data1 == <<>> ->
+ [];
+ true ->
+ {QVal,_} = d_q_value(Data1),
+ [{q, QVal}]
+ end,
+ case Enc of
+ 0 -> ?WH('Accept-Encoding',"gzip",Params);
+ 1 -> ?WH('Accept-Encoding',"compress",Params);
+ 2 -> ?WH('Accept-Encoding',"deflate",Params);
+ 3 -> ?WH('Accept-Encoding',"*",Params);
+ _ when list(Enc) ->
+ ?WH('Accept-Encoding',Enc,Params)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Accept-Language: * | <lang> [q=<q-value>]
+%% Type: Multiple
+%% Ref: 8.4.2.10
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_accept_language(Value) ->
+ ?WH('Accept-Language',Value,[]).
+
+format_accept_language(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_accept_language(H, _Version) ->
+ case H#wsp_header.value of
+ "*" -> ?ENCODE_SHORT(0);
+ Lang -> case catch encode_lang(Lang) of
+ {'EXIT', _} -> encode_text_string(Lang);
+ Code -> encode_integer(Code)
+ end
+ end.
+
+decode_accept_language(0, _Version) ->
+ ?WH('Accept-Language',"*",[]);
+decode_accept_language(Value, _Version) when integer(Value) ->
+ ?WH('Accept-Language',decode_lang(Value),[]);
+decode_accept_language(Value, _Version) when list(Value) ->
+ ?WH('Accept-Language',Value,[]);
+decode_accept_language({_,Data}, _Version) ->
+ {Data1, QData} = scan_header_data(Data),
+ Lang = case Data1 of
+ 0 ->
+ "*";
+ Value1 when integer(Value1) ->
+ decode_lang(Value1);
+ Value1 when list(Value1) ->
+ Value1;
+ {short,Data2} ->
+ decode_lang(d_long(Data2))
+ end,
+ Params =
+ if QData == <<>> ->
+ [];
+ true ->
+ {QVal,_} = d_q_value(QData),
+ [{q, QVal}]
+ end,
+ ?WH('Accept-Language',Lang,Params).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Accept-Ranges: none | bytes | <extension>
+%% Type: single
+%% Ref:
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_accept_ranges(Value) ->
+ ?WH('Accept-Ranges', Value, []).
+
+format_accept_ranges(H) ->
+ H#wsp_header.value.
+
+encode_accept_ranges(H, _Version) ->
+ case H#wsp_header.value of
+ "none" -> ?ENCODE_SHORT(0);
+ "bytes" -> ?ENCODE_SHORT(1);
+ Value -> encode_text_string(Value)
+ end.
+
+decode_accept_ranges(0, _Version) ->
+ ?WH('Accept-Ranges', "none", []);
+decode_accept_ranges(1, _Version) ->
+ ?WH('Accept-Ranges', "bytes", []);
+decode_accept_ranges(Value, _Version) when list(Value) ->
+ ?WH('Accept-Ranges', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Age: <delta-seconds>
+%% Type: single
+%% Ref:
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_age(Value) ->
+ %% FIXME
+ ?WH('Age', Value, []).
+
+format_age(H) ->
+ integer_to_list(H#wsp_header.value).
+
+encode_age(H, _Version) ->
+ e_delta_seconds(H#wsp_header.value).
+
+decode_age(Value, _Version) when integer(Value) ->
+ ?WH('Age', Value, []);
+decode_age({short,Data}, _Version) ->
+ ?WH('Age', d_long(Data), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Allow: <well-known-method>
+%% Type: multiple
+%% Ref:
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_allow(Value) ->
+ ?WH('Allow', parse_well_known_method(Value), []).
+
+format_allow(H) ->
+ atom_to_list(H#wsp_header.value).
+
+encode_allow(H, Version) ->
+ encode_well_known_method(H#wsp_header.value, Version).
+
+decode_allow(Value, Version) ->
+ ?WH('Allow', decode_well_known_method(Value,Version), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Authorization:
+%% Ref: 8.4.2.14
+%% Type: client-to-server
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_authorization(Value) ->
+ parse_credentials('Authorization', Value).
+
+format_authorization(H) ->
+ format_credentials(H#wsp_header.value, H#wsp_header.params).
+
+encode_authorization(H, Version) ->
+ encode_credentials(H#wsp_header.value, H#wsp_header.params, Version).
+
+decode_authorization({_,Data}, Version) ->
+ decode_credentials('Authorization', Data, Version).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%%
+%% Cache-Control:
+%% 8.4.2.15
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_cache_control(Value) ->
+ case Value of
+ "no-cache" -> ?WH('Cache-Control',Value,[]);
+ "no-store" -> ?WH('Cache-Control',Value,[]);
+ "max-stale" -> ?WH('Cache-Control',Value,[]);
+ "only-if-cached" -> ?WH('Cache-Control',Value,[]);
+ "private" -> ?WH('Cache-Control',Value,[]);
+ "public" -> ?WH('Cache-Control',Value,[]);
+ "no-transform" -> ?WH('Cache-Control',Value,[]);
+ "must-revalidate" -> ?WH('Cache-Control',Value,[]);
+ "proxy-revalidate" -> ?WH('Cache-Control',Value,[]);
+ _ ->
+ Params = parse_params([Value]),
+ ?WH('Cache-Control',"",Params)
+ end.
+
+format_cache_control(H) ->
+ if H#wsp_header.value == "" ->
+ format_params0(H#wsp_header.params);
+ true ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)]
+ end.
+
+
+
+encode_cache_control(H, Version) ->
+ case H#wsp_header.value of
+ "no-cache" -> ?ENCODE_SHORT(0);
+ "no-store" -> ?ENCODE_SHORT(1);
+ "max-stale" -> ?ENCODE_SHORT(3);
+ "only-if-cached" -> ?ENCODE_SHORT(5);
+ "private" -> ?ENCODE_SHORT(7);
+ "public" -> ?ENCODE_SHORT(6);
+ "no-transform" -> ?ENCODE_SHORT(8);
+ "must-revalidate" -> ?ENCODE_SHORT(9);
+ "proxy-revalidate" -> ?ENCODE_SHORT(10);
+ "" ->
+ case H#wsp_header.params of
+ [{'no-cache',Field}] ->
+ e_value(?ENCODE_SHORT(0),
+ e_field_name(Field,Version));
+ [{'max-age',Sec}] ->
+ e_value(?ENCODE_SHORT(2),
+ e_delta_seconds(Sec));
+ [{'max-fresh',Sec}] ->
+ e_value(?ENCODE_SHORT(4),
+ e_delta_seconds(Sec));
+ [{'private',Field}] ->
+ e_value(?ENCODE_SHORT(7),
+ e_field_name(Field,Version));
+ [{'s-maxage',Sec}] ->
+ e_value(?ENCODE_SHORT(11),
+ e_delta_seconds(Sec))
+ end;
+ Ext ->
+ [Param] = H#wsp_header.params,
+ e_value(encode_text_string(Ext),
+ encode_parameter(Param, Version))
+ end.
+
+
+decode_cache_control(Value, _Version) when integer(Value) ->
+ case Value of
+ 0 -> ?WH('Cache-Control',"no-cache",[]);
+ 1 -> ?WH('Cache-Control',"no-store",[]);
+ 3 -> ?WH('Cache-Control',"max-stale",[]);
+ 5 -> ?WH('Cache-Control',"only-if-cached",[]);
+ 7 -> ?WH('Cache-Control',"private",[]);
+ 6 -> ?WH('Cache-Control',"public",[]);
+ 8 -> ?WH('Cache-Control',"no-transform",[]);
+ 9 -> ?WH('Cache-Control',"must-revalidate",[]);
+ 10 -> ?WH('Cache-Control',"proxy-revalidate",[])
+ end;
+decode_cache_control(Value, _Version) when list(Value) ->
+ ?WH('Cache-Control',Value,[]);
+decode_cache_control({_,Data},Version) ->
+ {CacheDir, Data1} = scan_header_data(Data),
+ case CacheDir of
+ 0 ->
+ {Field,_} = d_field_name(Data1),
+ ?WH('Cache-Control',"",[{'no-cache',Field}]);
+ 2 ->
+ {Sec,_} = d_integer_value(Data1),
+ ?WH('Cache-Control',"",[{'max-age',Sec}]);
+ 4 ->
+ {Sec,_} = d_integer_value(Data1),
+ ?WH('Cache-Control',"",[{'max-fresh',Sec}]);
+ 7 ->
+ {Field,_} = d_field_name(Data1),
+ ?WH('Cache-Control',"",[{private,Field}]);
+ 11 ->
+ {Sec,_} = d_integer_value(Data1),
+ ?WH('Cache-Control',"",[{'s-maxage',Sec}]);
+ Ext when list(Ext) ->
+ {Param,_} = decode_parameter(Data1, Version),
+ ?WH('Cache-Control',Ext,[Param])
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Connection: close | Ext
+%% Type: single
+%% Ref: 8.4.2.16
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_connection(Value) ->
+ ?WH('Connection', Value, []).
+
+format_connection(H) ->
+ H#wsp_header.value.
+
+encode_connection(H, _Version) ->
+ case H#wsp_header.value of
+ "close" -> ?ENCODE_SHORT(0);
+ Value -> encode_text_string(Value)
+ end.
+
+decode_connection(0, _Version) ->
+ ?WH('Connection', "close", []);
+decode_connection(Value, _Version) when list(Value) ->
+ ?WH('Connection', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Base: <uri>
+%% Type: single
+%% Ref:
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_content_base(Value) ->
+ ?WH('Content-Base', Value, []).
+
+format_content_base(H) ->
+ H#wsp_header.value.
+
+encode_content_base(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_content_base(Value, _Version) when list(Value) ->
+ ?WH('Content-Base', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Encoding:
+%% Ref: 8.4.2.18
+%% Type: single
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_content_encoding(Value) ->
+ ?WH('Content-Encoding', tolower(Value), []).
+
+format_content_encoding(H) ->
+ H#wsp_header.value.
+
+encode_content_encoding(H, _Version) ->
+ case H#wsp_header.value of
+ "gzip" -> ?ENCODE_SHORT(0);
+ "compress" -> ?ENCODE_SHORT(1);
+ "deflate" -> ?ENCODE_SHORT(2);
+ Value -> encode_text_string(Value)
+ end.
+
+decode_content_encoding(0, _Version) ->
+ ?WH('Content-Encoding', "gzip", []);
+decode_content_encoding(1, _Version) ->
+ ?WH('Content-Encoding', "compress", []);
+decode_content_encoding(2, _Version) ->
+ ?WH('Content-Encoding',"deflate", []);
+decode_content_encoding(Value, _Version) when list(Value) ->
+ ?WH('Content-Encoding', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Language:
+%% Ref: 8.4.2.19
+%% Type: single
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_content_language(Value) ->
+ ?WH('Content-Language', Value, []).
+
+format_content_language(H) ->
+ H#wsp_header.value.
+
+encode_content_language(H, _Version) ->
+ case H#wsp_header.value of
+ "*" -> ?ENCODE_SHORT(0);
+ Lang -> case catch encode_lang(Lang) of
+ {'EXIT', _} -> encode_text_string(Lang);
+ Code -> encode_integer(Code)
+ end
+ end.
+
+decode_content_language(0, _Version) ->
+ ?WH('Content-Language',"*",[]);
+decode_content_language(Value, _Version) when integer(Value) ->
+ ?WH('Content-Language',decode_lang(Value),[]);
+decode_content_language(Value, _Version) when list(Value) ->
+ ?WH('Content-Language',Value,[]);
+decode_content_language({short,Data}, _Version) ->
+ Value = d_long(Data),
+ ?WH('Content-Language',decode_lang(Value),[]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Length: <integer-value>
+%% Ref: 8.4.2.20
+%% Type: single
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_content_length(Value) ->
+ ?WH('Content-Length', list_to_integer(Value), []).
+
+format_content_length(H) ->
+ integer_to_list(H#wsp_header.value).
+
+encode_content_length(H, _Version) ->
+ encode_integer(H#wsp_header.value).
+
+decode_content_length(Value, _Version) when integer(Value) ->
+ ?WH('Content-Length', Value, []);
+decode_content_length({short,Data}, _Version) ->
+ Value = d_long(Data),
+ ?WH('Content-Length', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Location: <uri-value>
+%% Ref: 8.4.2.21
+%% Type: single
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_content_location(Value) ->
+ ?WH('Content-Location', Value, []).
+
+format_content_location(H) ->
+ H#wsp_header.value.
+
+encode_content_location(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_content_location(Value, _Version) when list(Value) ->
+ ?WH('Content-Location', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Md5: <value-length> <digest>
+%% Ref: 8.4.2.22
+%% Type: single, end-to-end
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_content_md5(Value) ->
+ ?WH('Content-Md5', base64:decode(Value), []).
+
+format_content_md5(H) ->
+ base64:encode(H#wsp_header.value).
+
+encode_content_md5(H, _Version) ->
+ e_value(H#wsp_header.value).
+
+decode_content_md5({_,Data}, _Version) ->
+ ?WH('Content-Md5', Data, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Range: <first-byte-pos> <entity-len>
+%% Ref: 8.4.2.23
+%% Type: single
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_content_range(Value) ->
+ %% FIXME:
+ ?WH('Content-Range', Value, []).
+
+format_content_range(H) ->
+ {Pos,Len} = H#wsp_header.value,
+ if Len == "*" ->
+ ["bytes ", integer_to_list(Pos), "-*/*"];
+ true ->
+ ["bytes ", integer_to_list(Pos),"-",integer_to_list(Len-1),
+ "/", integer_to_list(Len)]
+ end.
+
+encode_content_range(H, _Version) ->
+ case H#wsp_header.value of
+ {Pos, "*"} ->
+ e_value(e_uintvar(Pos), <<128>>);
+ {Pos, Len} ->
+ e_value(e_uintvar(Pos), e_uintvar(Len))
+ end.
+
+decode_content_range({_, Data}, _Version) ->
+ {Pos, Data1} = d_uintvar(Data),
+ Len =
+ case Data1 of
+ <<128>> -> "*";
+ _ ->
+ {L, _} = d_uintvar(Data1),
+ L
+ end,
+ ?WH('Content-Range', {Pos,Len}, []).
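+
+%% A small sketch of the formatting above: a first-byte-pos of 0 with an
+%% entity length of 1024 renders as an HTTP-style byte range.
+content_range_format_example() ->
+ "bytes 0-1023/1024" =
+ lists:flatten(format_content_range(?WH('Content-Range', {0, 1024}, []))),
+ ok.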
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Type:
+%% Ref: 8.4.2.24
+%% Type: single, end-to-end
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_content_type(Value) ->
+ case string:tokens(Value, ";") of
+ [Type | Ps] ->
+ Params = parse_params(Ps),
+ ?WH('Content-Type', Type, Params);
+ [] ->
+ ?WH('Content-Type', Value, [])
+ end.
+
+format_content_type(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_content_type(H, Version) ->
+ case encode_params(H#wsp_header.params,Version) of
+ <<>> ->
+ encode_well_known_media(H#wsp_header.value, Version);
+ Params ->
+ Media = encode_well_known_media(H#wsp_header.value, Version),
+ e_value(Media, Params)
+ end.
+
+decode_content_type(Value,Version) when integer(Value) ->
+ ?WH('Content-Type', decode_well_known_media(Value,Version), []);
+decode_content_type(Value,Version) when list(Value) ->
+ ?WH('Content-Type', decode_well_known_media(Value,Version), []);
+decode_content_type({_, Data}, Version) ->
+ {Value,Data1} = scan_header_data(Data),
+ ContentType = if integer(Value) ->
+ decode_well_known_media(Value,Version);
+ list(Value) ->
+ decode_well_known_media(Value,Version);
+ true ->
+ {_,Data2} = Value,
+ decode_well_known_media(d_long(Data2),Version)
+ end,
+ Params = decode_params(Data1, Version),
+ ?WH('Content-Type', ContentType, Params).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Date: <http-date>
+%% Ref: 8.4.2.25
+%% Type: single, end-to-end
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_date(String) ->
+ {DateTime, _} = parse_http_date(String),
+ ?WH('Date', DateTime, []).
+
+format_date(H) ->
+ fmt_date(H#wsp_header.value).
+
+encode_date(H, _Version) ->
+ e_date(H#wsp_header.value).
+
+decode_date(Value, _Version) ->
+ ?WH('Date', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Etag: <text-string>
+%% Ref: 8.4.2.26
+%% Type: single, end-to-end
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_etag(Value) ->
+ ?WH('Etag', Value, []).
+
+format_etag(H) ->
+ H#wsp_header.value.
+
+encode_etag(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_etag(Value, _Version) ->
+ ?WH('Etag', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Expires: <date-value>
+%% Ref: 8.4.2.27
+%% Type: single, end-to-end, server-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_expires(String) ->
+ {DateTime, _} = parse_http_date(String),
+ ?WH('Expires', DateTime, []).
+
+format_expires(H) ->
+ fmt_date(H#wsp_header.value).
+
+encode_expires(H, _Version) ->
+ e_date(H#wsp_header.value).
+
+decode_expires(Value, _Version) ->
+ ?WH('Expires', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% From: <text-string>
+%% Ref: 8.4.2.28
+%% Type: single
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_from(Value) ->
+ ?WH('From', Value, []).
+
+format_from(H) ->
+ H#wsp_header.value.
+
+encode_from(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_from(Value, _Version) ->
+ ?WH('From', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Host: <text-string>
+%% Ref: 8.4.2.29
+%% Type: single, end-to-end, client-to-server
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_host(Value) ->
+ ?WH('Host', Value, []).
+
+format_host(H) ->
+ H#wsp_header.value.
+
+encode_host(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_host(Value, _Version) ->
+ ?WH('Host', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% If-Modified-Since: <date-value>
+%% Ref: 8.4.2.30
+%% Type: single, end-to-end, client-to-server
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_if_modified_since(String) ->
+ {DateTime, _} = parse_http_date(String),
+ ?WH('If-Modified-Since', DateTime, []).
+
+format_if_modified_since(H) ->
+ fmt_date(H#wsp_header.value).
+
+encode_if_modified_since(H, _Version) ->
+ e_date(H#wsp_header.value).
+
+decode_if_modified_since(Value, _Version) ->
+ ?WH('If-Modified-Since', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% If-Match: <text-string>
+%% Ref: 8.4.2.31
+%% Type: end-to-end, client-to-server
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_if_match(Value) ->
+ ?WH('If-Match', Value, []).
+
+format_if_match(H) ->
+ H#wsp_header.value.
+
+encode_if_match(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_if_match(Value, _Version) ->
+ ?WH('If-Match', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% If-None-Match: <text-string>
+%% Ref: 8.4.2.32
+%% Type: end-to-end, client-to-server
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_if_none_match(Value) ->
+ ?WH('If-None-Match', Value, []).
+
+format_if_none_match(H) ->
+ H#wsp_header.value.
+
+encode_if_none_match(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_if_none_match(Value, _Version) ->
+ ?WH('If-None-Match', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% If-Range: Text | Date
+%% Ref: 8.4.2.33
+%% Type: end-to-end, client-to-server
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_if_range(Value) ->
+ case catch parse_http_date(Value) of
+ {'EXIT', _} ->
+ ?WH('If-Range', Value, []);
+ {DateTime,_} ->
+ ?WH('If-Range', DateTime, [])
+ end.
+
+
+format_if_range(H) ->
+ case H#wsp_header.value of
+ Value when list(Value) -> Value;
+ DateTime -> fmt_date(DateTime)
+ end.
+
+encode_if_range(H, _Version) ->
+ case H#wsp_header.value of
+ Value when list(Value) ->
+ encode_text_string(Value);
+ DateTime ->
+ e_date(DateTime)
+ end.
+
+decode_if_range(Value, _Version) when list(Value) ->
+ ?WH('If-Range', decode_text_string(Value), []);
+decode_if_range(Value, _Version) ->
+ ?WH('If-Range', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% If-Unmodified-Since: <date-value>
+%% Ref: 8.4.2.34
+%% Type: single, end-to-end, client-to-server
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_if_unmodified_since(String) ->
+ {DateTime, _} = parse_http_date(String),
+ ?WH('If-Unmodified-Since', DateTime, []).
+
+format_if_unmodified_since(H) ->
+ fmt_date(H#wsp_header.value).
+
+encode_if_unmodified_since(H, _Version) ->
+ e_date(H#wsp_header.value).
+
+decode_if_unmodified_since(Value, _Version) ->
+ ?WH('If-Unmodified-Since', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Location: <uri-value>
+%% Ref: 8.4.2.36
+%% Type: single, end-to-end, server-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_location(Value) ->
+ ?WH('Location', Value, []).
+
+format_location(H) ->
+ H#wsp_header.value.
+
+encode_location(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_location(Value, _Version) when list(Value) ->
+ ?WH('Location', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Last-Modified: <date-value>
+%% Ref: 8.4.2.35
+%% Type: single, end-to-end, server-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_last_modified(String) ->
+ {DateTime, _} = parse_http_date(String),
+ ?WH('Last-Modified', DateTime, []).
+
+format_last_modified(H) ->
+ fmt_date(H#wsp_header.value).
+
+encode_last_modified(H, _Version) ->
+ e_date(H#wsp_header.value).
+
+decode_last_modified(Value, _Version) ->
+ ?WH('Last-Modified', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Max-Forwards: <integer-value>
+%% Ref: 8.4.2.37
+%% Type: single, end-to-end, server-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_max_forwards(String) ->
+ ?WH('Max-Forwards', list_to_integer(String), []).
+
+format_max_forwards(H) ->
+ integer_to_list(H#wsp_header.value).
+
+encode_max_forwards(H, _Version) ->
+ encode_integer(H#wsp_header.value).
+
+decode_max_forwards(Value, _Version) ->
+ ?WH('Max-Forwards', decode_integer(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Pragma: No-Cache | value-length Parameter
+%% Ref:
+%% Type:
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_pragma(Value) ->
+ ?WH('Pragma',Value,[]).
+
+format_pragma(H) ->
+ case H#wsp_header.value of
+ "" -> format_params(H#wsp_header.params);
+ Value -> Value
+ end.
+
+encode_pragma(H, Version) ->
+ case H#wsp_header.value of
+ "no-cache" -> ?ENCODE_SHORT(0);
+ "" ->
+ encode_parameter(hd(H#wsp_header.params), Version)
+ end.
+
+decode_pragma(0, _Version) ->
+ ?WH('Pragma',"no-cache",[]);
+decode_pragma({_,Data}, Version) ->
+ {Param,_} = decode_parameter(Data, Version),
+ ?WH('Pragma',"",[Param]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Proxy-Authenticate:
+%% Ref: 8.4.2.39
+%% Type: single?, proxy-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_proxy_authenticate(Value) ->
+ parse_challenge('Proxy-Authenticate', Value).
+
+format_proxy_authenticate(H) ->
+ format_challenge(H#wsp_header.value, H#wsp_header.params).
+
+encode_proxy_authenticate(H, Version) ->
+ encode_challenge(H#wsp_header.value,
+ H#wsp_header.params, Version).
+
+decode_proxy_authenticate({_, Data}, Version) ->
+ decode_challenge('Proxy-Authenticate', Data, Version).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Proxy-Authorization:
+%% Ref: 8.4.2.40
+%% Type: single?, client-to-proxy
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_proxy_authorization(Value) ->
+ parse_credentials('Proxy-Authorization', Value).
+
+format_proxy_authorization(H) ->
+ format_credentials(H#wsp_header.value, H#wsp_header.params).
+
+encode_proxy_authorization(H, Version) ->
+ encode_credentials(H#wsp_header.value, H#wsp_header.params, Version).
+
+decode_proxy_authorization({_,Data}, Version) ->
+ decode_credentials('Proxy-Authorization', Data, Version).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Public: <well-known-method> | Token-Text
+%% Ref: 8.4.2.41
+%% Type:
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_public(Value) ->
+ ?WH('Public', parse_well_known_method(Value), []).
+
+format_public(H) ->
+ if atom(H#wsp_header.value) ->
+ atom_to_list(H#wsp_header.value);
+ list(H#wsp_header.value) ->
+ H#wsp_header.value
+ end.
+
+encode_public(H, Version) ->
+ if atom(H#wsp_header.value) ->
+ encode_well_known_method(H#wsp_header.value,Version);
+ list(H#wsp_header.value) ->
+ encode_text_string(H#wsp_header.value)
+ end.
+
+decode_public(Value, _Version) when list(Value) ->
+ ?WH('Public', Value, []);
+decode_public(Value, Version) ->
+ ?WH('Public', decode_well_known_method(Value,Version), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Range:
+%% Ref: 8.4.2.42
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_range(Value) ->
+ %% FIXME:
+ ?WH('Range', Value, []).
+
+format_range(H) ->
+ case H#wsp_header.value of
+ {First,undefined} ->
+ ["bytes=", integer_to_list(First), "-"];
+ {First,Last} ->
+ ["bytes=", integer_to_list(First), "-", integer_to_list(Last)];
+ Len when integer(Len) ->
+ ["bytes=-", integer_to_list(Len)]
+ end.
+
+encode_range(H, _Version) ->
+ case H#wsp_header.value of
+ {First,undefined} ->
+ e_value(?ENCODE_SHORT(0),
+ e_uintvar(First));
+ {First,Last} ->
+ e_value(?ENCODE_SHORT(0),
+ e_uintvar(First),
+ e_uintvar(Last));
+ Len when integer(Len) ->
+ e_value(?ENCODE_SHORT(1),
+ e_uintvar(Len))
+ end.
+
+decode_range({_,Data}, _Version) ->
+ case scan_header_data(Data) of
+ {0, Data1} ->
+ case d_uintvar(Data1) of
+ {First, <<>>} ->
+ ?WH('Range', {First, undefined},[]);
+ {First, Data2} ->
+ {Last, _} = d_uintvar(Data2),
+ ?WH('Range', {First, Last}, [])
+ end;
+ {1, Data1} ->
+        {Len, _} = d_uintvar(Data1),
+ ?WH('Range', Len, [])
+ end.
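+%% Illustrative sketch: the Range value is kept in one of three shapes,
+%% mirroring the clauses above:
+%%   {First, undefined}   format_range => "bytes=<First>-"
+%%   {First, Last}        format_range => "bytes=<First>-<Last>"
+%%   Len (an integer)     format_range => "bytes=-<Len>"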
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Referer: <uri-value>
+%% Ref: 8.4.2.43
+%% Type: single
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_referer(Value) ->
+ ?WH('Referer', Value, []).
+
+format_referer(H) ->
+ H#wsp_header.value.
+
+encode_referer(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_referer(Value, _Version) when list(Value) ->
+ ?WH('Referer', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Retry-After: Value-length (Retry-date-value | Retry-delta-seconds)
+%% Ref: 8.4.2.44
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_retry_after(Value) ->
+ case catch parse_http_date(Value) of
+ {'EXIT', _} ->
+ ?WH('Retry-After', list_to_integer(Value), []);
+ {DateTime,_} ->
+ ?WH('Retry-After', DateTime, [])
+ end.
+
+format_retry_after(H) ->
+ Value = H#wsp_header.value,
+ if integer(Value) ->
+ integer_to_list(Value);
+ true ->
+ fmt_date(Value)
+ end.
+
+encode_retry_after(H, _Version) ->
+ Value = H#wsp_header.value,
+ if integer(Value) ->
+ e_value(?ENCODE_SHORT(1),
+ e_delta_seconds(Value));
+ true ->
+ e_value(?ENCODE_SHORT(0),
+ e_date(Value))
+ end.
+
+decode_retry_after({_,Data}, _Version) ->
+ case scan_header_data(Data) of
+ {0, Data1} ->
+ ?WH('Retry-After', d_date(Data1), []);
+ {1, Data1} ->
+ case scan_header_data(Data1) of
+ Sec when integer(Sec) ->
+ ?WH('Retry-After', Sec, []);
+ {short,Data2} ->
+ ?WH('Retry-After', d_long(Data2), [])
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Server: <text-string>
+%% Ref: 8.4.2.45
+%% Type: server-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_server(Value) ->
+ ?WH('Server', Value, []).
+
+format_server(H) ->
+ H#wsp_header.value.
+
+encode_server(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_server(Value, _Version) ->
+ ?WH('Server', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Transfer-Encoding:
+%% Ref: 8.4.2.46
+%% Type: hop-by-hop
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_transfer_encoding(Value) ->
+ ?WH('Transfer-Encoding', Value, []).
+
+format_transfer_encoding(H) ->
+ H#wsp_header.value.
+
+encode_transfer_encoding(H, _Version) ->
+ case H#wsp_header.value of
+ "chunked" -> ?ENCODE_SHORT(0);
+ Value -> encode_text_string(Value)
+ end.
+
+decode_transfer_encoding(0, _Version) ->
+ ?WH('Transfer-Encoding', "chunked", []);
+decode_transfer_encoding(Value, _Version) when list(Value)->
+ ?WH('Transfer-Encoding', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Upgrade: Text-String
+%% Ref: 8.4.2.47
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_upgrade(Value) ->
+ ?WH('Upgrade', Value, []).
+
+format_upgrade(H) ->
+ H#wsp_header.value.
+
+encode_upgrade(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_upgrade(Value, _Version) when list(Value) ->
+ ?WH('Upgrade', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% User-Agent:
+%% Ref: 8.4.2.48
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_user_agent(Value) ->
+ ?WH('User-Agent', Value, []).
+
+format_user_agent(H) ->
+ H#wsp_header.value.
+
+encode_user_agent(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_user_agent(Value, _Version) ->
+ ?WH('User-Agent', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Vary: Well-known-header-field | Token-text
+%% Ref: 8.4.2.49
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_vary(Value) ->
+ ?WH('Vary', normalise_field_name(Value), []).
+
+format_vary(H) ->
+ to_list(H#wsp_header.value).
+
+encode_vary(H, Version) ->
+ e_field_name(H#wsp_header.value, Version).
+
+decode_vary(Value, _Version) when integer(Value) ->
+ ?WH('Vary', lookup_field_name(Value), []);
+decode_vary(Value, _Version) when list(Value) ->
+ ?WH('Vary', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Via: <text-string>
+%% Ref: 8.4.2.50
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_via(Value) ->
+ ?WH('Via', Value, []).
+
+format_via(H) ->
+ H#wsp_header.value.
+
+encode_via(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_via(Value, _Version) when list(Value) ->
+ ?WH('Via', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Warning: Warn-Code | Warning-value
+%% Ref: 8.4.2.51
+%% Type: general, multiple
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_warning(Value) ->
+ case string:tokens(Value, " ") of
+ [Code] ->
+ ?WH('Warning', {list_to_integer(Code),"",""}, []);
+ [Code,Agent,Text] ->
+ ?WH('Warning', {list_to_integer(Code), Agent, Text}, [])
+ end.
+
+format_warning(H) ->
+ case H#wsp_header.value of
+ {Code, "", ""} ->
+ integer_to_list(Code);
+ {Code, Agent, Text} ->
+ [integer_to_list(Code), " ", Agent, " ", Text]
+ end.
+
+encode_warning(H, _Version) ->
+ case H#wsp_header.value of
+ {Code,"",""} ->
+ ?ENCODE_SHORT(Code);
+ {Code, Agent, Text} ->
+ e_value(?ENCODE_SHORT(Code),
+ encode_text_string(Agent),
+ encode_text_string(Text))
+ end.
+
+decode_warning(Value, _Version) when integer(Value) ->
+ ?WH('Warning', {Value, "", ""}, []);
+decode_warning({_, Data}, _Version) ->
+  {Code,Data1} = scan_header_data(Data),
+ {Agent,Data2} = d_text_string(Data1),
+ {Text,_Data3} = d_text_string(Data2),
+ ?WH('Warning', {Code,Agent,Text}, []).
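+%% Illustrative sketch of the textual side (quoted warn-text containing
+%% spaces is not handled by the simple tokenizer above):
+%%   parse_warning("110")              => value {110, "", ""}
+%%   parse_warning("110 proxy stale")  => value {110, "proxy", "stale"}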
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% WWW-Authenticate: challenge
+%% Ref: 8.4.2.52
+%% Type: single? server-to-client
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_www_authenticate(Value) ->
+ parse_challenge('Www-Authenticate', Value).
+
+format_www_authenticate(H) ->
+ format_challenge(H#wsp_header.value, H#wsp_header.params).
+
+encode_www_authenticate(H, Version) ->
+ encode_challenge(H#wsp_header.value,
+ H#wsp_header.params, Version).
+
+decode_www_authenticate({_, Data}, Version) ->
+ decode_challenge('Www-Authenticate', Data, Version).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Disposition: "form-data" | "attachment" [<param>]*
+%% Ref: 8.4.2.53
+%% Type: single
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_content_disposition(Value) ->
+ ?WH('Content-Disposition', Value, []).
+
+format_content_disposition(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_content_disposition(H, Version) ->
+ case H#wsp_header.value of
+ "form-data" ->
+ e_value(?ENCODE_SHORT(0),
+ encode_params(H#wsp_header.params, Version));
+ "attachment" ->
+ e_value(?ENCODE_SHORT(1),
+ encode_params(H#wsp_header.params, Version))
+ end.
+
+decode_content_disposition({_,Data}, Version) when binary(Data) ->
+ case scan_header_data(Data) of
+ {0, Data1} ->
+ Params = decode_params(Data1, Version),
+ ?WH('Content-Disposition', "form-data", Params);
+ {1, Data1} ->
+ Params = decode_params(Data1, Version),
+ ?WH('Content-Disposition', "attachment", Params)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Application-Id:
+%% Ref: 8.4.2.54
+%% Type:
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_application_id(Value) ->
+ ?WH('X-Wap-Application-Id', Value, []).
+
+format_x_wap_application_id(H) ->
+ H#wsp_header.value.
+
+encode_x_wap_application_id(H, _Version) ->
+ encode_push_application(H#wsp_header.value).
+
+decode_x_wap_application_id(Value, _Version) ->
+ ?WH('X-Wap-Application-Id', decode_push_application(Value),[]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Content-Uri: <uri-value>
+%% Ref: 8.4.2.55
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_content_uri(Value) ->
+ ?WH('X-Wap-Content-Uri', Value, []).
+
+format_x_wap_content_uri(H) ->
+ H#wsp_header.value.
+
+encode_x_wap_content_uri(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_x_wap_content_uri(Value, _Version) when list(Value) ->
+ ?WH('X-Wap-Content-Uri', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Initiator-Uri: <uri-value>
+%% Ref: 8.4.2.56
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_initiator_uri(Value) ->
+ ?WH('X-Wap-Initiator-Uri', Value, []).
+
+format_x_wap_initiator_uri(H) ->
+ H#wsp_header.value.
+
+encode_x_wap_initiator_uri(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_x_wap_initiator_uri(Value, _Version) when list(Value) ->
+ ?WH('X-Wap-Initiator-Uri', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Accept-Application: Any-Application | Application-Id-Value
+%% Ref: 8.4.2.57
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_accept_application(Value) ->
+ ?WH('Accept-Application', Value, []).
+
+format_accept_application(H) ->
+ H#wsp_header.value.
+
+
+encode_accept_application(H, _Version) ->
+ case H#wsp_header.value of
+ "*" -> ?ENCODE_SHORT(0);
+ Value ->
+ case catch encode_push_application(Value) of
+ {'EXIT',_} ->
+ encode_uri_value(Value);
+ App ->
+ encode_integer(App)
+ end
+ end.
+
+decode_accept_application(0, _Version) ->
+ ?WH('Accept-Application', "*", []);
+decode_accept_application(Value, _Version) when integer(Value) ->
+ ?WH('Accept-Application', decode_push_application(Value), []);
+decode_accept_application({short,Data}, _Version) ->
+ Value = d_long(Data),
+ ?WH('Accept-Application', decode_push_application(Value), []);
+decode_accept_application(Value, _Version) when list(Value) ->
+ ?WH('Accept-Application', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Bearer-Indication: <integer-value>
+%% Type: single
+%% Ref: 8.4.2.58
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_bearer_indication(Value) ->
+ ?WH('Bearer-Indication', Value, []).
+
+format_bearer_indication(H) ->
+ integer_to_list(H#wsp_header.value).
+
+encode_bearer_indication(H, _Version) ->
+ encode_integer(H#wsp_header.value).
+
+decode_bearer_indication(Value, _Version) when integer(Value) ->
+ ?WH('Bearer-Indication', Value, []);
+decode_bearer_indication({short,Data}, _Version) ->
+ Value = d_long(Data),
+ ?WH('Bearer-Indication', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Push-Flag: Short-Integer
+%% Type: single
+%% Ref: 8.4.2.59
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_push_flag(Value) ->
+    ?WH('Push-Flag', list_to_integer(Value), []).
+
+format_push_flag(H) ->
+ integer_to_list(H#wsp_header.value).
+
+encode_push_flag(H, _Version) ->
+ ?ENCODE_SHORT(H#wsp_header.value).
+
+decode_push_flag(Value, _Version) when integer(Value) ->
+ ?WH('Push-Flag', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Profile: <uri-value>
+%% Ref: 8.4.2.60
+%% Type: single, hop-by-hop, client-to-proxy
+%%
+%% Note: Normally transferred as 'X-Wap-Profile'
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_profile(Value) ->
+ ?WH('Profile', Value, []).
+
+format_profile(H) ->
+ H#wsp_header.value.
+
+encode_profile(H, _Version) ->
+ encode_uri_value(H#wsp_header.value).
+
+decode_profile(Value, _Version) ->
+ ?WH('Profile', decode_uri_value(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Profile-Diff: Value-Length Octets
+%% Ref: 8.4.2.61
+%% Type: single, hop-by-hop, client-to-proxy
+%%
+%% Value is WBXML encoded profile diff information
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_profile_diff(Value) ->
+ %% FIXME parse XML code?
+ ?WH('Profile-Diff', Value, []).
+
+format_profile_diff(_H) ->
+ %% FIXME emit ???
+ "WBXML".
+
+encode_profile_diff(H, _Version) ->
+ e_value(H#wsp_header.value).
+
+decode_profile_diff({_,Value}, _Version) ->
+ ?WH('Profile-Diff', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Profile-Warning: Code
+%% Ref: 8.4.2.62
+%% Type: single
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_profile_warning(Value) ->
+    %% FIXME: parse optional warn-target and warn-date
+    [Code|_] = string:tokens(Value, " "),
+    ?WH('Profile-Warning', {list_to_integer(Code),"",undefined}, []).
+
+format_profile_warning(H) ->
+ {Code,Target,Date} = H#wsp_header.value,
+ CodeData = integer_to_list(Code),
+ if Target == "", Date == undefined ->
+ CodeData;
+ Date == undefined ->
+ [CodeData," ",Target];
+ true ->
+            [CodeData," ",Target," ",fmt_date(Date)]
+ end.
+
+
+encode_profile_warning(H, _Version) ->
+ {Code,Target,Date} = H#wsp_header.value,
+ CodeData = case Code of
+ 100 -> ?ENCODE_SHORT(16#10);
+ 101 -> ?ENCODE_SHORT(16#11);
+ 102 -> ?ENCODE_SHORT(16#12);
+ 200 -> ?ENCODE_SHORT(16#20);
+ 201 -> ?ENCODE_SHORT(16#21);
+ 202 -> ?ENCODE_SHORT(16#22);
+ 203 -> ?ENCODE_SHORT(16#23)
+ end,
+ if Target == "", Date == undefined ->
+ CodeData;
+ Date == undefined ->
+ e_value(CodeData, encode_text_string(Target));
+ true ->
+ e_value(CodeData, encode_text_string(Target), e_date(Date))
+ end.
+
+
+decode_profile_warning(Value, _Version) when integer(Value) ->
+ Code = case Value of
+ 16#10 -> 100;
+ 16#11 -> 101;
+ 16#12 -> 102;
+ 16#20 -> 200;
+ 16#21 -> 201;
+ 16#22 -> 202;
+ 16#23 -> 203
+ end,
+ ?WH('Profile-Warning', {Code,"",undefined}, []);
+decode_profile_warning({_, <<1:1, Value:7, Data/binary>>}, _Version) ->
+ Code = case Value of
+ 16#10 -> 100;
+ 16#11 -> 101;
+ 16#12 -> 102;
+ 16#20 -> 200;
+ 16#21 -> 201;
+ 16#22 -> 202;
+ 16#23 -> 203
+ end,
+ {Target,Data1} = d_text_string(Data),
+ Date =
+ if Data1 == <<>> ->
+ undefined;
+ true ->
+ {DateValue,_} = scan_header_data(Data1),
+ d_date(DateValue)
+ end,
+ ?WH('Profile-Warning', {Code,Target,Date}, []).
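+%% Illustrative sketch: warn-codes map to short-integers as in the tables
+%% above, e.g. decode_profile_warning(16#20, Version) yields the value
+%% {200, "", undefined}, and encode_profile_warning turns 200 back into
+%% ?ENCODE_SHORT(16#20).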
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Expect: 100-continue | Expect-expression
+%% Ref: 8.4.2.63
+%% Type: client-to-server
+%% Note: Bug in the spec: value-length is missing!
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_expect(Value) ->
+ ?WH('Expect', Value, []).
+
+format_expect(H) ->
+ case H#wsp_header.value of
+ {Var,Val} ->
+ [Var,"=",Val, format_params(H#wsp_header.params)];
+ Val when list(Val) ->
+ Val
+ end.
+
+encode_expect(H, Version) ->
+ case H#wsp_header.value of
+ "100-continue" ->
+ ?ENCODE_SHORT(0);
+ {Var,Val} ->
+ e_value(encode_text_string(Var),
+ encode_text_string(Val),
+ encode_params(H#wsp_header.params,Version))
+ end.
+
+decode_expect(0, _Version) ->
+ ?WH('Expect', "100-continue", []);
+decode_expect({_, Data}, Version) ->
+ {Var, Data1} = d_text_string(Data),
+ {Val, Data2} = d_text_string(Data1),
+ Params = decode_params(Data2, Version),
+ ?WH('Expect', {decode_text_string(Var),
+ decode_text_string(Val)}, Params).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Te: Trailers | TE-General-Form
+%% Ref: 8.4.2.64
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_te(Value) ->
+ ?WH('Te', Value, []).
+
+format_te(H) ->
+ [H#wsp_header.value, format_params(H#wsp_header.params)].
+
+encode_te(H, Version) ->
+ case H#wsp_header.value of
+ "trailers" -> ?ENCODE_SHORT(1);
+ "chunked" ->
+ e_value(?ENCODE_SHORT(2),
+ encode_params(H#wsp_header.params,Version));
+ "identity" ->
+ e_value(?ENCODE_SHORT(3),
+ encode_params(H#wsp_header.params,Version));
+ "gzip" ->
+ e_value(?ENCODE_SHORT(4),
+ encode_params(H#wsp_header.params,Version));
+ "compress" ->
+ e_value(?ENCODE_SHORT(5),
+ encode_params(H#wsp_header.params,Version));
+ "deflate" ->
+ e_value(?ENCODE_SHORT(6),
+ encode_params(H#wsp_header.params,Version));
+ Value ->
+ e_value(encode_text_string(Value),
+ encode_params(H#wsp_header.params,Version))
+ end.
+
+decode_te(1, _Version) ->
+ ?WH('Te', "trailers", []);
+decode_te({_, Data}, _Version) ->
+ {Val, Data1} = scan_header_data(Data),
+ Value =
+ case Val of
+ 2 -> "chunked";
+ 3 -> "identity";
+ 4 -> "gzip";
+ 5 -> "compress";
+ 6 -> "deflate";
+ V when list(V) -> V
+ end,
+ Params = case Data1 of
+ <<>> -> [];
+                 <<128, QData/binary>> ->
+ {QValue, _} = d_q_value(QData),
+ [{q, QValue}]
+ end,
+ ?WH('Te', Value, Params).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Trailer: Well-known-header-field | Token-text
+%% Ref: 8.4.2.65
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_trailer(Value) ->
+ ?WH('Trailer', normalise_field_name(Value), []).
+
+format_trailer(H) ->
+ to_list(H#wsp_header.value).
+
+encode_trailer(H, Version) ->
+ e_field_name(H#wsp_header.value, Version).
+
+decode_trailer(Value, _Version) when integer(Value) ->
+ ?WH('Trailer', lookup_field_name(Value), []);
+decode_trailer(Value, _Version) when list(Value) ->
+ ?WH('Trailer', Value, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Tod:
+%% Ref: 8.4.2.66
+%% Type: hop-by-hop
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_tod(String) ->
+ {DateTime, _} = parse_http_date(String),
+ ?WH('X-Wap-Tod', DateTime, []).
+
+format_x_wap_tod(H) ->
+ fmt_date(H#wsp_header.value).
+
+encode_x_wap_tod(H, _Version) ->
+ e_date(H#wsp_header.value).
+
+decode_x_wap_tod(Value, _Version) ->
+ ?WH('X-Wap-Tod', d_date(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Content-Id: <quoted-string>
+%% Type:
+%% Ref: 8.4.2.67
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_content_id(Value) ->
+ ?WH('Content-Id', Value, []).
+
+format_content_id(H) ->
+ [$", H#wsp_header.value, $"].
+
+encode_content_id(H, _Version) ->
+ encode_quoted_string(H#wsp_header.value).
+
+decode_content_id(Value, _Version) when list(Value) ->
+ ?WH('Content-Id', decode_quoted_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Set-Cookie: <len> <cookie-version> <cookie-name> <cookie-value> <param>*
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_set_cookie(String) ->
+    %% FIXME: Cookie-value may be a quoted string containing ',', '=', ';' etc.
+    %% Also handle several cookies on the same line.
+ case string:tokens(String, ";") of
+ [Cookie | Ps] ->
+ case string:tokens(Cookie, "=") of
+ [Name,Value] ->
+ Params = parse_params(Ps),
+ ?WH('Set-Cookie', {{1,0}, Name, Value}, Params);
+ [Name] ->
+ Params = parse_params(Ps),
+ ?WH('Set-Cookie', {{1,0}, Name, ""}, Params)
+ end;
+ [] ->
+ ?WH('Set-Cookie', {{1,0}, String, ""}, [])
+ end.
+
+format_set_cookie(H) ->
+ case H#wsp_header.value of
+ {{1,0},Name,Value} ->
+ [Name, "=", Value,format_params(H#wsp_header.params)];
+ {Version,Name,Value} ->
+ [format_version(Version)," ",
+ Name, "=", Value,
+ format_params(H#wsp_header.params)]
+ end.
+
+encode_set_cookie(H, Version) ->
+ {CookieVersion,Name,Value} = H#wsp_header.value,
+ e_value(encode_version(CookieVersion),
+ encode_text_string(Name),
+ encode_text_string(Value),
+ encode_params(H#wsp_header.params, Version)).
+
+decode_set_cookie({_, Data}, Version) ->
+ {CookieVersion, Data1} = scan_header_data(Data),
+ {CookieName, Data2} = scan_header_data(Data1),
+ {CookieValue, Data3} = scan_header_data(Data2),
+ Params = decode_params(Data3, Version),
+ ?WH('Set-Cookie', {decode_version(CookieVersion),
+ decode_text_string(CookieName),
+ decode_text_string(CookieValue)}, Params).
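+%% Illustrative sketch of the textual parser above (example cookie only):
+%%   parse_set_cookie("id=42; path=/; domain=example.com") =>
+%%     value {{1,0},"id","42"}, params [{path,"/"},{domain,"example.com"}]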
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Cookie:
+%% Ref: 8.4.2.69
+%% Type: single?, client-to-server
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_cookie(Value) ->
+ %% FIXME parse cookie version etc
+ ?WH('Cookie', {{1,0},Value}, []).
+
+format_cookie(H) ->
+ case H#wsp_header.value of
+ {{1,0}, Cookies} ->
+ lists:map(fun({Name,Value,Ps}) ->
+ [Name,"=",Value, format_params(Ps)]
+ end, Cookies);
+ {Version, Cookies} ->
+ [format_version(Version)," ",
+ lists:map(fun({Name,Value,Ps}) ->
+ [Name,"=",Value, format_params(Ps)]
+ end, Cookies)]
+ end.
+
+encode_cookie(H, _Version) ->
+    {CookieVersion, Cookies} = H#wsp_header.value,
+    e_value(encode_version(CookieVersion),
+            encode_cookies(Cookies, [])).
+
+encode_cookies([{Name,Value,Ps} | Cs], Acc) ->
+ List =
+ [encode_text_string(Name),
+ encode_text_string(Value) |
+ case Ps of
+ [{path,P},{domain,D}] ->
+ [encode_text_string(P), encode_text_string(D)];
+ [{domain,D},{path,P}] ->
+ [encode_text_string(P), encode_text_string(D)];
+ [{path,P}] ->
+ [encode_text_string(P)];
+ [{domain,D}] ->
+ [encode_text_string(""), encode_text_string(D)];
+ [] ->
+ []
+ end],
+ Sz = lists:sum(lists:map(fun(B) -> size(B) end, List)),
+ encode_cookies(Cs, [[e_uintvar(Sz) | List] | Acc]);
+encode_cookies([], Acc) ->
+ list_to_binary(lists:reverse(Acc)).
+
+
+decode_cookie({_, Data}, _Version) ->
+ {CookieVersion, Data1} = scan_header_data(Data),
+ Cookies = decode_cookies(Data1, []),
+ ?WH('Cookie', {decode_version(CookieVersion), Cookies}, []).
+
+decode_cookies(<<>>, Acc) ->
+ lists:reverse(Acc);
+decode_cookies(Data0, Acc) ->
+ {Len, Data1} = d_uintvar(Data0),
+ <<C0:Len/binary, Data2/binary>> = Data1,
+ {Name, C1} = scan_header_data(C0),
+ {Value, C2} = scan_header_data(C1),
+ {Ps1, C3} =
+ case d_text_string(C2) of
+ {"", C21} -> {[], C21};
+ {Path,C21} -> {[{path,Path}], C21}
+ end,
+ {Ps2, _} =
+ case C3 of
+ <<>> -> {[], <<>>};
+ _ ->
+ {Domain,C4} = d_text_string(C3),
+ {[{domain,Domain}], C4}
+ end,
+    decode_cookies(Data2, [{decode_text_string(Name),
+                            decode_text_string(Value),
+                            Ps1++Ps2} | Acc]).
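+%% Illustrative sketch: each encoded cookie is a uintvar length followed by
+%% the name, value and optional path/domain text strings, so a decoded
+%% Cookie header carries {CookieVersion, [{Name, Value, Params}, ...]}.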
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Encoding-Version: Version-Value | Value-length Code-Page [Version-Value]
+%% Ref: 8.4.2.70
+%% Type: single, hop-by-hop, client-and-proxys
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_encoding_version(Value) ->
+ ?WH('Encoding-Version', parse_version(Value), []).
+
+format_encoding_version(H) ->
+ format_version(H#wsp_header.value).
+
+encode_encoding_version(H, _Version) ->
+ encode_version(H#wsp_header.value).
+
+decode_encoding_version(Value, _Version) when integer(Value) ->
+ ?WH('Encoding-Version', decode_version(Value), []);
+decode_encoding_version(Value, _Version) when list(Value) ->
+    %% Note: in this case we parse the Value since we must
+    %% know the encoding version
+ ?WH('Encoding-Version', parse_version(Value), []);
+decode_encoding_version({_,<<_:1,_CodePage:7>>}, _Version) ->
+ %% ??? FIXME
+ ?WH('Encoding-Version', "", []);
+decode_encoding_version({_,<<_:1,_CodePage:7, Data1/binary>>}, _Version) ->
+ {Value,_Data2} = scan_header_data(Data1),
+ %% FIXME CodePage
+ ?WH('Encoding-Version', decode_version(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Security:
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_security(Value) ->
+ ?WH('X-Wap-Security', Value, []).
+
+format_x_wap_security(H) ->
+ H#wsp_header.value.
+
+encode_x_wap_security(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_x_wap_security(Value, _Version) ->
+ ?WH('X-Wap-Security', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Loc-Invocation:
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_loc_invocation(Value) ->
+ ?WH('X-Wap-Loc-Invocation', Value, []).
+
+format_x_wap_loc_invocation(H) ->
+ H#wsp_header.value.
+
+encode_x_wap_loc_invocation(H, _Version) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_x_wap_loc_invocation(Value, _Version) ->
+ ?WH('X-Wap-Loc-Invocation', decode_text_string(Value), []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% X-Wap-Loc-Delivery:
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+parse_x_wap_loc_delivery(Value) ->
+ ?WH('X-Wap-Loc-Delivery', Value, []).
+
+format_x_wap_loc_delivery(H) ->
+ H#wsp_header.value.
+
+encode_x_wap_loc_delivery(H, _Value) ->
+ encode_text_string(H#wsp_header.value).
+
+decode_x_wap_loc_delivery(Value, _Version) ->
+ ?WH('X-Wap-Loc-Delivery', decode_text_string(Value), []).
+
+
+%%
+%% Header Field parameters
+%%
+
+parse_params([Param|Ps]) ->
+ case string:tokens(Param, "=") of
+ [Name,Value0] ->
+ Val = trim(Value0),
+ P = case trim(tolower(Name)) of
+ "q" ->{q,Val};
+ "charset" -> {charset,Val};
+ "level" -> {level,Val};
+ "type" -> {type,Val};
+ "name" -> {name,Val};
+ "filename" -> {filename,Val};
+ "differences" -> {differences,Val};
+ "padding" -> {padding,Val};
+ "start" -> {start,Val};
+ "start-info" -> {'start-info',Val};
+ "comment" -> {comment,Val};
+ "domain" -> {domain,Val};
+ "max-age" -> {'max-age',Val};
+ "path" -> {path,Val};
+ "secure" -> {secure,no_value};
+ "sec" -> {sec, Val};
+ "mac" -> {mac, Val};
+ "creation-date" -> {'creation-date', Val};
+ "modification-date" -> {'modification-date', Val};
+ "read-date" -> {'read-date', Val};
+ "size" -> {size, Val};
+ Nm -> {Nm, Val}
+ end,
+ [P | parse_params(Ps)];
+ _ ->
+ parse_params(Ps)
+ end;
+parse_params([]) ->
+ [].
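+%% Illustrative sketch of the parameter parser above:
+%%   parse_params(["charset=utf-8", " q=0.5", "secure"]) =>
+%%       [{charset,"utf-8"}, {q,"0.5"}]
+%% (a token without "=" such as "secure" is skipped by the catch-all clause)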
+
+%% format Params without leading ";"
+format_params0([{Param,no_value}|Ps]) ->
+    [to_list(Param) | format_params(Ps)];
+format_params0([{Param,Value}|Ps]) ->
+    [to_list(Param),"=",to_list(Value) | format_params(Ps)];
+format_params0([]) ->
+    [].
+
+format_params(Ps) ->
+ lists:map(fun({Param,no_value}) ->
+ ["; ", to_list(Param)];
+ ({Param,Value})->
+ ["; ", to_list(Param),"=",to_list(Value)]
+ end, Ps).
+
+
+encode_params(Params, Version) ->
+ list_to_binary(encode_params1(Params,Version)).
+
+encode_params1([Param|Ps], Version) ->
+ [ encode_parameter(Param, Version) | encode_params1(Ps, Version)];
+encode_params1([], _Version) ->
+ [].
+
+
+decode_params(Data, Version) ->
+ decode_params(Data, [], Version).
+
+decode_params(<<>>, Ps, _Version) ->
+ lists:reverse(Ps);
+decode_params(Data, Ps, Version) ->
+ {ParamVal, Data1} = decode_parameter(Data, Version),
+ decode_params(Data1, [ParamVal | Ps], Version).
+
+
+
+
+encode_parameter({ParamName, ParamValue}, Ver) ->
+ case ParamName of
+ q when Ver >= 16#01 ->
+ <<1:1, 16#00:7,
+ (encode_typed_field(Ver,'Q-value', ParamValue))/binary>>;
+ charset when Ver >= 16#01 ->
+ <<1:1, 16#01:7,
+ (encode_typed_field(Ver,'Well-known-charset',ParamValue))/binary>>;
+ level when Ver >= 16#01 ->
+ <<1:1, 16#02:7,
+       (encode_typed_field(Ver,'Version-value',ParamValue))/binary>>;
+
+ type when Ver >= ?WSP_12 ->
+ <<1:1, 16#09:7,
+ (encode_typed_field(Ver,'Constrained-encoding',ParamValue))/binary>>;
+ type when Ver >= 16#01 ->
+ <<1:1, 16#03:7,
+ (encode_typed_field(Ver,'Integer-value',ParamValue))/binary>>;
+
+ name when Ver >= ?WSP_14 ->
+ <<1:1, 16#17:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ name when Ver >= 16#01 ->
+ <<1:1, 16#05:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+ filename when Ver >= ?WSP_14 ->
+ <<1:1, 16#18:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ filename when Ver >= 16#01 ->
+ <<1:1, 16#06:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+ differences when Ver >= 16#01 ->
+ <<1:1, 16#07:7,
+ (encode_typed_field(Ver,'Field-name',ParamValue))/binary>>;
+
+ padding when Ver >= 16#01 ->
+ <<1:1, 16#08:7,
+ (encode_typed_field(Ver,'Short-integer',ParamValue))/binary>>;
+
+
+ start when Ver >= ?WSP_14 ->
+ <<1:1, 16#19:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ start when Ver >= ?WSP_12 ->
+ <<1:1, 16#0A:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+
+ 'start-info' when Ver >= ?WSP_14 ->
+ <<1:1, 16#1A:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ 'start-info' when Ver >= ?WSP_12 ->
+ <<1:1, 16#0B:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+ comment when Ver >= ?WSP_14 ->
+ <<1:1, 16#1B:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ comment when Ver >= ?WSP_13 ->
+ <<1:1, 16#0C:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+ domain when Ver >= ?WSP_14 ->
+ <<1:1, 16#1C:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ domain when Ver >= ?WSP_13 ->
+ <<1:1, 16#0D:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+ 'max-age' when Ver >= ?WSP_13 ->
+ <<1:1, 16#0E:7,
+ (encode_typed_field(Ver,'Delta-seconds-value',ParamValue))/binary>>;
+
+ path when Ver >= ?WSP_14 ->
+ <<1:1, 16#1D:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ path when Ver >= ?WSP_13 ->
+ <<1:1, 16#0F:7,
+ (encode_typed_field(Ver,'Text-string',ParamValue))/binary>>;
+
+ secure when Ver >= ?WSP_13 ->
+ <<1:1, 16#10:7,
+ (encode_typed_field(Ver,'No-value',ParamValue))/binary>>;
+ %% NOTE: "sec" and "mac" are really 1.4 features but used by 1.3 client provisioning
+ %"sec" when Ver >= ?WSP_14 ->
+ sec when Ver >= ?WSP_13 ->
+ <<1:1, 16#11:7,
+ (encode_typed_field(Ver,'Short-integer',ParamValue))/binary>>;
+ %"mac" when Ver >= ?WSP_14 ->
+ mac when Ver >= ?WSP_13 ->
+ <<1:1, 16#12:7,
+ (encode_typed_field(Ver,'Text-value',ParamValue))/binary>>;
+ 'creation-date' when Ver >= ?WSP_14 ->
+ <<1:1, 16#13:7,
+ (encode_typed_field(Ver,'Date-value',ParamValue))/binary>>;
+ 'modification-date' when Ver >= ?WSP_14 ->
+ <<1:1, 16#14:7,
+ (encode_typed_field(Ver,'Date-value',ParamValue))/binary>>;
+ 'read-date' when Ver >= ?WSP_14 ->
+ <<1:1, 16#15:7,
+ (encode_typed_field(Ver,'Date-value',ParamValue))/binary>>;
+ size when Ver >= ?WSP_14 ->
+ <<1:1, 16#16:7,
+ (encode_typed_field(Ver,'Integer-value',ParamValue))/binary>>;
+ _ ->
+ <<(encode_text_string(ParamName))/binary,
+ (encode_text_string(ParamValue))/binary >>
+ end.
+
+%% decode_parameter: return {ParameterName, ParameterValue}
+decode_parameter(<<1:1,Code:7,Data/binary>>, Version) ->
+ case Code of
+ 16#00 ->
+ {Val,Data1} = decode_typed_field('Q-value', Data, Version),
+ {{ q, Val}, Data1};
+
+ 16#01 ->
+ {Val,Data1} = decode_typed_field('Well-known-charset',Data,Version),
+ {{charset, Val}, Data1};
+
+ 16#02 ->
+ {Val,Data1} = decode_typed_field('Version-value',Data,Version),
+ {{level, Val}, Data1};
+
+ 16#03 ->
+ {Val,Data1} = decode_typed_field('Integer-value', Data,Version),
+ {{type, Val}, Data1};
+
+ 16#05 ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{name, Val}, Data1};
+
+ 16#06 ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{filename, Val}, Data1};
+
+ 16#07 ->
+ {Val,Data1} = decode_typed_field('Field-name', Data,Version),
+ {{differences, Val}, Data1};
+
+ 16#08 ->
+ {Val,Data1} = decode_typed_field('Short-integer', Data,Version),
+ {{padding, Val}, Data1};
+
+ 16#09 ->
+ {Val,Data1} = decode_typed_field('Constrained-encoding', Data,Version),
+ {{type, Val}, Data1};
+
+ 16#0A ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{start, Val}, Data1};
+
+ 16#0B ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{'start-info', Val}, Data1};
+
+ 16#0C ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{comment, Val}, Data1};
+
+ 16#0D ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{domain, Val}, Data1};
+
+ 16#0E ->
+ {Val,Data1} = decode_typed_field('Delta-seconds-value', Data,Version),
+ {{'max-age', Val}, Data1};
+
+ 16#0F ->
+ {Val,Data1} = decode_typed_field('Text-string', Data,Version),
+ {{path, Val}, Data1};
+
+ 16#10 ->
+ {Val,Data1} = decode_typed_field('No-value', Data,Version),
+ {{secure, Val}, Data1};
+
+ 16#11 ->
+ {Val,Data1} = decode_typed_field('Short-integer', Data,Version),
+ {{sec, Val}, Data1};
+
+ 16#12 ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{mac, Val}, Data1};
+
+ 16#13 ->
+ {Val,Data1} = decode_typed_field('Date-value', Data,Version),
+ {{'creation-date', Val}, Data1};
+
+ 16#14 ->
+ {Val,Data1} = decode_typed_field('Date-value', Data,Version),
+ {{'modification-date', Val}, Data1};
+
+ 16#15 ->
+ {Val,Data1} = decode_typed_field('Date-value', Data,Version),
+ {{'read-date', Val}, Data1};
+
+ 16#16 ->
+ {Val,Data1} = decode_typed_field('Integer-value', Data,Version),
+ {{size, Val}, Data1};
+
+ 16#17 ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{name, Val}, Data1};
+
+ 16#18 ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{filename, Val}, Data1};
+
+ 16#19 ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{start, Val}, Data1};
+
+ 16#1A ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{'start-info', Val}, Data1};
+
+ 16#1B ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{comment, Val}, Data1};
+
+ 16#1C ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{domain, Val}, Data1};
+
+ 16#1D ->
+ {Val,Data1} = decode_typed_field('Text-value', Data,Version),
+ {{path, Val}, Data1};
+ _ ->
+ exit({error, unknown_parameter})
+ end;
+decode_parameter(Data, _Version) ->
+    %% Untyped-parameter: Token-Text Untyped-value
+    {ParamName,Data1} = d_text_string(Data),
+    %% Untyped-value: Integer-Value | Text-Value
+ {ParamValue, Data2} = decode_untyped_value(Data1),
+ {{ParamName,ParamValue}, Data2}.
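+%% Illustrative sketch: well-known parameters are encoded as
+%% <<1:1, Code:7, TypedValue/binary>>; e.g. a charset parameter uses code
+%% 16#01 followed by an Integer-value holding the charset MIBenum, while an
+%% unknown parameter falls back to two text strings (name, then value).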
+
+
+encode_typed_field(Ver,Type,Value) ->
+ case Type of
+ 'Well-known-charset' ->
+ MIBenum = encode_charset(Value),
+ encode_integer(MIBenum);
+
+ 'Constrained-encoding' ->
+ encode_constrained_media(Value, Ver);
+
+ 'Text-string' ->
+ encode_text_string(Value);
+
+ 'Text-value' ->
+ encode_text_value(Value);
+
+ 'Short-integer' ->
+ ?ENCODE_SHORT(Value);
+
+ 'Date-value' ->
+ e_date(Value);
+
+        'Delta-seconds-value' ->
+ e_delta_seconds(Value);
+
+ 'No-value' ->
+ e_no_value(Value);
+
+ _ ->
+ io:format("FIXME: encode_typed_field unsupported type = ~p\n",
+ [Type]),
+ exit({error,badtype})
+ end.
+
+
+decode_typed_field(Type, Data, Version) ->
+ case Type of
+ 'Q-value' ->
+ d_q_value(Data);
+
+ 'Well-known-charset' ->
+ {MIBenum, T100} = d_integer_value(Data),
+ {decode_charset(MIBenum), T100};
+
+ 'Constrained-encoding' ->
+ {Value, Data1} = scan_header_data(Data),
+ {decode_constrained_media(Value,Version), Data1};
+
+ 'Text-string' ->
+ d_text_string(Data);
+
+ 'Text-value' ->
+ d_text_value(Data);
+
+ 'Short-integer' ->
+ decode_short_integer(Data);
+
+ 'Delta-seconds-value' ->
+ d_integer_value(Data);
+
+ 'Date-value' ->
+ {Val, Data1} = decode_long_integer(Data),
+ {d_date(Val), Data1};
+
+ 'Field-name' ->
+ d_field_name(Data);
+
+ 'No-value' ->
+ d_no_value(Data);
+
+ _ ->
+ io:format("FIXME: unsupported type = ~p\n",[Type]),
+ exit({error,badtype})
+ end.
+
+
+%% Integer-Value | Text-Value
+%% return as {Value, Tail}
+decode_untyped_value(<<1:1, Short:7, Tail/binary>>) ->
+ {Short, Tail};
+decode_untyped_value(<<0:3, Len:5, Data/binary>>) when Len =/= 31 ->
+ Sz = Len*8,
+ <<Long:Sz, Tail/binary>> = Data,
+ {Long, Tail};
+decode_untyped_value(Data) ->
+ d_text_string(Data).
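+%% Illustrative sketch of the shapes handled above:
+%%   decode_untyped_value(<<1:1, 42:7>>)  => {42, <<>>}     (short integer)
+%%   decode_untyped_value(<<2, 1, 0>>)    => {256, <<>>}    (2-octet integer)
+%% anything else is read as a text string via d_text_string/1.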
+
+
+e_field_name(Value, Version) ->
+ case normalise_field_name(Value) of
+ 'Accept' -> <<16#80>>;
+ 'Accept-Charset' when Version >= ?WSP_13 -> <<16#bb>>;
+ 'Accept-Charset' -> <<16#81>>;
+ 'Accept-Encoding' when Version >= ?WSP_13 -> <<16#bc>>;
+ 'Accept-Encoding' -> <<16#82>>;
+ 'Accept-Language' -> <<16#83>>;
+ 'Accept-Ranges' -> <<16#84>>;
+ 'Age' -> <<16#85>>;
+ 'Allow' -> <<16#86>>;
+ 'Authorization' -> <<16#87>>;
+ 'Cache-Control' when Version >= ?WSP_14 -> <<16#c7>>;
+ 'Cache-Control' when Version >= ?WSP_13 -> <<16#bd>>;
+ 'Cache-Control' -> <<16#88>>;
+ 'Connection' -> <<16#89>>;
+ 'Content-Base' -> <<16#8a>>;
+ 'Content-Encoding' -> <<16#8b>>;
+ 'Content-Language' -> <<16#8c>>;
+ 'Content-Length' -> <<16#8d>>;
+ 'Content-Location' -> <<16#8e>>;
+ 'Content-Md5' -> <<16#8f>>;
+ 'Content-Range' when Version >= ?WSP_13 -> <<16#be>>;
+ 'Content-Range' -> <<16#90>>;
+ 'Content-Type' -> <<16#91>>;
+ 'Date' -> <<16#92>>;
+ 'Etag' -> <<16#93>>;
+ 'Expires' -> <<16#94>>;
+ 'From' -> <<16#95>>;
+ 'Host' -> <<16#96>>;
+ 'If-Modified-Since' -> <<16#97>>;
+ 'If-Match' -> <<16#98>>;
+ 'If-None-Match' -> <<16#99>>;
+ 'If-Range' -> <<16#9a>>;
+ 'If-Unmodified-Since' -> <<16#9b>>;
+ 'Location' -> <<16#9c>>;
+ 'Last-Modified' -> <<16#9d>>;
+ 'Max-Forwards' -> <<16#9e>>;
+ 'Pragma' -> <<16#9f>>;
+ 'Proxy-Authenticate' -> <<16#a0>>;
+ 'Proxy-Authorization' -> <<16#a1>>;
+ 'Public' -> <<16#a2>>;
+ 'Range' -> <<16#a3>>;
+ 'Referer' -> <<16#a4>>;
+ 'Retry-After' -> <<16#a5>>;
+ 'Server' -> <<16#a6>>;
+ 'Transfer-Encoding' -> <<16#a7>>;
+ 'Upgrade' -> <<16#a8>>;
+ 'User-Agent' -> <<16#a9>>;
+ 'Vary' -> <<16#aa>>;
+ 'Via' -> <<16#ab>>;
+ 'Warning' -> <<16#ac>>;
+ 'Www-Authenticate' -> <<16#ad>>;
+ 'Content-Disposition' when Version >= ?WSP_14 -> <<16#c5>>;
+ 'Content-Disposition' -> <<16#ae>>;
+ %% VERSION > 1.1
+ 'X-Wap-Application-Id' when Version >= ?WSP_12 -> <<16#af>>;
+ 'X-Wap-Content-Uri' when Version >= ?WSP_12 -> <<16#b0>>;
+ 'X-Wap-Initiator-Uri' when Version >= ?WSP_12 -> <<16#b1>>;
+ 'Accept-Application' when Version >= ?WSP_12 -> <<16#b2>>;
+ 'Bearer-Indication' when Version >= ?WSP_12 -> <<16#b3>>;
+ 'Push-Flag' when Version >= ?WSP_12 -> <<16#b4>>;
+ 'Profile' when Version >= ?WSP_12 -> <<16#b5>>;
+ 'Profile-Diff' when Version >= ?WSP_12 -> <<16#b6>>;
+ 'Profile-Warning' when Version >= ?WSP_12 -> <<16#b7>>;
+ 'Expect' when Version >= ?WSP_15 -> <<16#c8>>;
+ 'Expect' when Version >= ?WSP_13 -> <<16#b8>>;
+ 'Te' when Version >= ?WSP_13 -> <<16#b9>>;
+ 'Trailer' when Version >= ?WSP_13 -> <<16#ba>>;
+ 'X-Wap-Tod' when Version >= ?WSP_13 -> <<16#bf>>;
+ 'Content-Id' when Version >= ?WSP_13 -> <<16#c0>>;
+ 'Set-Cookie' when Version >= ?WSP_13 -> <<16#c1>>;
+ 'Cookie' when Version >= ?WSP_13 -> <<16#c2>>;
+ 'Encoding-Version' when Version >= ?WSP_13 -> <<16#c3>>;
+ 'Profile-Warning' when Version >= ?WSP_14 -> <<16#c4>>;
+ 'X-Wap-Security' when Version >= ?WSP_14 -> <<16#c6>>;
+ 'X-Wap-Loc-Invocation' when Version >= ?WSP_15 -> <<16#c9>>;
+ 'X-Wap-Loc-Delivery' when Version >= ?WSP_15 -> <<16#ca>>;
+ Field -> encode_text_string(atom_to_list(Field))
+ end.
+
+
+%%
+%% decode and normalise to the form list_to_atom("Ulll-Ulll-Ull")
+%%
+normalise_field_name(Cs) when atom(Cs) ->
+ Cs;
+normalise_field_name(Cs) ->
+ list_to_atom(normalise_fieldU(Cs)).
+
+normalise_fieldU([C|Cs]) when C >= $a, C =< $z ->
+ [(C-$a)+$A | normalise_fieldL(Cs)];
+normalise_fieldU([C|Cs]) -> [ C | normalise_fieldL(Cs)];
+normalise_fieldU([]) -> [].
+
+normalise_fieldL([C|Cs]) when C >= $A, C =< $Z ->
+ [(C-$A)+$a | normalise_fieldL(Cs)];
+normalise_fieldL([$-|Cs]) -> [$- | normalise_fieldU(Cs)];
+normalise_fieldL([C|Cs]) -> [C | normalise_fieldL(Cs)];
+normalise_fieldL([]) -> [].
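+%% Illustrative sketch: normalise_field_name("content-TYPE") => 'Content-Type',
+%% i.e. the first letter of each dash-separated word is upper-cased and the
+%% rest lower-cased before the list is turned into an atom.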
+
+
+tolower([C|Cs]) when C >= $A, C =< $Z ->
+ [(C-$A)+$a | tolower(Cs)];
+tolower([C|Cs]) -> [C|tolower(Cs)];
+tolower([]) -> [].
+
+trim(Cs) ->
+ lists:reverse(trim1(lists:reverse(trim1(Cs)))).
+
+trim1([$\s|Cs]) -> trim1(Cs);
+trim1([$\t|Cs]) -> trim1(Cs);
+trim1([$\r|Cs]) -> trim1(Cs);
+trim1([$\n|Cs]) -> trim1(Cs);
+trim1(Cs) -> Cs.
+
+
+d_field_name(Data) ->
+ case scan_header_data(Data) of
+ {Code, Data1} when integer(Code) ->
+ {lookup_field_name(Code), Data1};
+ {TmpField,Data1} when list(TmpField) ->
+ {normalise_field_name(TmpField), Data1}
+ end.
+
+d_no_value(<<0, Data/binary>>) ->
+ {no_value, Data}.
+
+e_no_value(_) ->
+ <<0>>.
+
+
+lookup_field_name(Code) ->
+ case Code of
+%%% Version 1.1
+ 16#00 -> 'Accept';
+ 16#01 -> 'Accept-Charset';
+ 16#02 -> 'Accept-Encoding';
+ 16#03 -> 'Accept-Language';
+ 16#04 -> 'Accept-Ranges';
+ 16#05 -> 'Age';
+ 16#06 -> 'Allow';
+ 16#07 -> 'Authorization';
+ 16#08 -> 'Cache-Control';
+ 16#09 -> 'Connection';
+ 16#0a -> 'Content-Base';
+ 16#0b -> 'Content-Encoding';
+ 16#0c -> 'Content-Language';
+ 16#0d -> 'Content-Length';
+ 16#0e -> 'Content-Location';
+ 16#0f -> 'Content-Md5';
+ 16#10 -> 'Content-Range';
+ 16#11 -> 'Content-Type';
+ 16#12 -> 'Date';
+ 16#13 -> 'Etag';
+ 16#14 -> 'Expires';
+ 16#15 -> 'From';
+ 16#16 -> 'Host';
+ 16#17 -> 'If-Modified-Since';
+ 16#18 -> 'If-Match';
+ 16#19 -> 'If-None-Match';
+ 16#1a -> 'If-Range';
+ 16#1b -> 'If-Unmodified-Since';
+ 16#1c -> 'Location';
+ 16#1d -> 'Last-Modified';
+ 16#1e -> 'Max-Forwards';
+ 16#1f -> 'Pragma';
+ 16#20 -> 'Proxy-Authenticate';
+ 16#21 -> 'Proxy-Authorization';
+ 16#22 -> 'Public';
+ 16#23 -> 'Range';
+ 16#24 -> 'Referer';
+ 16#25 -> 'Retry-After';
+ 16#26 -> 'Server';
+ 16#27 -> 'Transfer-Encoding';
+ 16#28 -> 'Upgrade';
+ 16#29 -> 'User-Agent';
+ 16#2a -> 'Vary';
+ 16#2b -> 'Via';
+ 16#2c -> 'Warning';
+ 16#2d -> 'Www-Authenticate';
+ 16#2e -> 'Content-Disposition';
+%%% Version 1.2
+ 16#2f -> 'X-Wap-Application-Id';
+ 16#30 -> 'X-Wap-Content-Uri';
+ 16#31 -> 'X-Wap-Initiator-Uri';
+ 16#32 -> 'Accept-Application';
+ 16#33 -> 'Bearer-Indication';
+ 16#34 -> 'Push-Flag';
+ 16#35 -> 'Profile';
+ 16#36 -> 'Profile-Diff';
+ 16#37 -> 'Profile-Warning';
+%%% Version 1.3
+ 16#38 -> 'Expect';
+ 16#39 -> 'Te';
+ 16#3a -> 'Trailer';
+ 16#3b -> 'Accept-Charset';
+ 16#3c -> 'Accept-Encoding';
+ 16#3d -> 'Cache-Control';
+ 16#3e -> 'Content-Range';
+ 16#3f -> 'X-Wap-Tod';
+ 16#40 -> 'Content-Id';
+ 16#41 -> 'Set-Cookie';
+ 16#42 -> 'Cookie';
+ 16#43 -> 'Encoding-Version';
+%%% Version 1.4
+ 16#44 -> 'Profile-Warning';
+ 16#45 -> 'Content-Disposition';
+ 16#46 -> 'X-Wap-Security';
+ 16#47 -> 'Cache-Control';
+%%% Version 1.5
+ 16#48 -> 'Expect';
+ 16#49 -> 'X-Wap-Loc-Invocation';
+ 16#4a -> 'X-Wap-Loc-Delivery';
+%% Unknown
+ _ ->
+ list_to_atom("X-Unknown-"++erlang:integer_to_list(Code, 16))
+ end.
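+%% Illustrative sketch: lookup_field_name(16#11) => 'Content-Type', and
+%% e_field_name/2 emits the same assignment with the high bit set,
+%% e.g. <<16#91>> for 'Content-Type' (16#80 bor 16#11).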
+
+
+encode_charset(Charset) ->
+ %% FIXME: we should really resolve aliases as well
+ %% charset:from_aliases(Charset)
+ case charset:from_mime_name(Charset) of
+ 0 -> exit({error, unknown_charset});
+ MIBenum -> MIBenum
+ end.
+
+encode_language(Language) ->
+ Code = encode_lang(tolower(Language)),
+ <<Code>>.
+
+
+
+decode_charset(MIBenum) ->
+ case charset:to_mime_name(MIBenum) of
+ undefined ->
+ exit({error, unknown_charset});
+ Preferred ->
+ Preferred
+ end.
+
+%% ISO 639 Language Assignments, Appendix A, Table 41, Page 102-103
+decode_lang(Code) ->
+ case lookup_language(Code) of
+ [L|_] -> atom_to_list(L);
+ [] -> ""
+ end.
+
+
+lookup_language(Code) ->
+ case Code of
+ 16#01 -> ['aa','afar'];
+ 16#02 -> ['ab','abkhazian'];
+ 16#03 -> ['af','afrikans'];
+ 16#04 -> ['am','amharic'];
+ 16#05 -> ['ar','arabic'];
+ 16#06 -> ['as','assamese'];
+ 16#07 -> ['ay','aymara'];
+ 16#08 -> ['az','azerbaijani'];
+ 16#09 -> ['ba','bashkir'];
+ 16#0a -> ['be','byelorussian'];
+ 16#0b -> ['bg','bulgarian'];
+ 16#0c -> ['bh','bihari'];
+ 16#0d -> ['bi','bislama'];
+ 16#0e -> ['bn','bangla','bengali'];
+ 16#0f -> ['bo','tibetan'];
+ 16#10 -> ['br','breton'];
+ 16#11 -> ['ca','catalan'];
+ 16#12 -> ['co','corsican'];
+ 16#13 -> ['cs','czech'];
+ 16#14 -> ['cy','welsh'];
+ 16#15 -> ['da','danish'];
+ 16#16 -> ['de','german'];
+ 16#17 -> ['dz','bhutani'];
+ 16#18 -> ['el','greek'];
+ 16#19 -> ['en','english'];
+ 16#1a -> ['eo','esperanto'];
+ 16#1b -> ['es','spanish'];
+ 16#1c -> ['et','estonian'];
+ 16#1d -> ['eu','basque'];
+ 16#1e -> ['fa','persian'];
+ 16#1f -> ['fi','finnish'];
+ 16#20 -> ['fj','fiji'];
+ 16#82 -> ['fo','faeroese'];
+ 16#22 -> ['fr','french'];
+ 16#83 -> ['fy','frisian'];
+ 16#24 -> ['ga','irish'];
+ 16#25 -> ['gd','scots-gaelic'];
+ 16#26 -> ['gl','galician'];
+ 16#27 -> ['gn','guarani'];
+ 16#28 -> ['gu','gujarati'];
+ 16#29 -> ['ha','hausa'];
+ 16#2a -> ['he','hebrew'];
+ 16#2b -> ['hi','hindi'];
+ 16#2c -> ['hr','croatian'];
+ 16#2d -> ['hu','hungarian'];
+ 16#2e -> ['hy','armenian'];
+ 16#84 -> ['ia','interlingua'];
+ 16#30 -> ['id','indonesian'];
+ 16#86 -> ['ie','interlingue'];
+ 16#87 -> ['ik','inupiak'];
+ 16#33 -> ['is','icelandic'];
+ 16#34 -> ['it','italian'];
+ 16#89 -> ['iu','inuktitut'];
+ 16#36 -> ['ja','japanese'];
+ 16#37 -> ['jw','javanese'];
+ 16#38 -> ['ka','georgian'];
+ 16#39 -> ['kk','kazakh'];
+ 16#8a -> ['kl','greenlandic'];
+ 16#3b -> ['km','cambodian'];
+ 16#3c -> ['kn','kannada'];
+ 16#3d -> ['ko','korean'];
+ 16#3e -> ['ks','kashmiri'];
+ 16#3f -> ['ku','kurdish'];
+ 16#40 -> ['ky','kirghiz'];
+ 16#8b -> ['la','latin'];
+ 16#42 -> ['ln','lingala'];
+ 16#43 -> ['lo','laothian'];
+ 16#44 -> ['lt','lithuanian'];
+ 16#45 -> ['lv','lettish','latvian'];
+ 16#46 -> ['mg','malagese'];
+ 16#47 -> ['mi','maori'];
+ 16#48 -> ['mk','macedonian'];
+ 16#49 -> ['ml','malayalam'];
+ 16#4a -> ['mn','mongolian'];
+ 16#4b -> ['mo','moldavian'];
+ 16#4c -> ['mr','marathi'];
+ 16#4d -> ['ms','malay'];
+ 16#4e -> ['mt','maltese'];
+ 16#4f -> ['my','burmese'];
+ 16#81 -> ['na','nauru'];
+ 16#51 -> ['ne','nepali'];
+ 16#52 -> ['nl','dutch'];
+ 16#53 -> ['no','norwegian'];
+ 16#54 -> ['oc','occitan'];
+ 16#55 -> ['om','oromo'];
+ 16#56 -> ['or','oriya'];
+ 16#57 -> ['pa','punjabi'];
+ 16#58 -> ['po','polish'];
+ 16#59 -> ['ps','pushto','pashto'];
+ 16#5a -> ['pt','portugese'];
+ 16#5b -> ['qu','quechua'];
+ 16#8c -> ['rm','rhaeto-romance'];
+ 16#5d -> ['rn','kirundi'];
+ 16#5e -> ['ro','romanian'];
+ 16#5f -> ['ru','russian'];
+ 16#60 -> ['rw','kinyarwanda'];
+ 16#61 -> ['sa','sanskrit'];
+ 16#62 -> ['sd','sindhi'];
+ 16#63 -> ['sg','sangho'];
+ 16#64 -> ['sh','serbo-croatian'];
+ 16#65 -> ['si','sinhalese'];
+ 16#66 -> ['sk','slovak'];
+ 16#67 -> ['sl','slovenian'];
+ 16#68 -> ['sm','samoan'];
+ 16#69 -> ['sn','shona'];
+ 16#6a -> ['so','somali'];
+ 16#6b -> ['sq','albanian'];
+ 16#6c -> ['sr','serbian'];
+ 16#6d -> ['ss','siswati'];
+ 16#6e -> ['st','seshoto'];
+ 16#6f -> ['su','sundanese'];
+ 16#70 -> ['sv','swedish'];
+ 16#71 -> ['sw','swahili'];
+ 16#72 -> ['ta','tamil'];
+ 16#73 -> ['te','telugu'];
+ 16#74 -> ['tg','tajik'];
+ 16#75 -> ['th','thai'];
+ 16#76 -> ['ti','tigrinya'];
+ 16#77 -> ['tk','turkmen'];
+ 16#78 -> ['tl','tagalog'];
+ 16#79 -> ['tn','setswana'];
+ 16#7a -> ['to','tonga'];
+ 16#7b -> ['tr','turkish'];
+ 16#7c -> ['ts','tsonga'];
+ 16#7d -> ['tt','tatar'];
+ 16#7e -> ['tw','twi'];
+ 16#7f -> ['ug','uighur'];
+ 16#50 -> ['uk','ukrainian'];
+ 16#21 -> ['ur','urdu'];
+ 16#23 -> ['uz','uzbek'];
+ 16#2f -> ['vi','vietnamese'];
+ 16#85 -> ['vo','volapuk'];
+ 16#31 -> ['wo','wolof'];
+ 16#32 -> ['xh','xhosa'];
+ 16#88 -> ['yi','yiddish'];
+ 16#35 -> ['yo','yoruba'];
+ 16#3a -> ['za','zhuang'];
+ 16#41 -> ['zh','chinese'];
+ 16#5c -> ['zu','zulu'];
+ _ -> []
+ end.
+
+encode_lang(Language) ->
+ case tolower(Language) of
+ "aa" -> 16#01;
+ "afar" -> 16#01;
+ "ab" -> 16#02;
+ "abkhazian" -> 16#02;
+ "af" -> 16#03;
+ "afrikans" -> 16#03;
+ "am" -> 16#04;
+ "amharic" -> 16#04;
+ "ar" -> 16#05;
+ "arabic" -> 16#05;
+ "as" -> 16#06;
+ "assamese" -> 16#06;
+ "ay" -> 16#07;
+ "aymara" -> 16#07;
+ "az" -> 16#08;
+ "azerbaijani" -> 16#08;
+ "ba" -> 16#09;
+ "bashkir" -> 16#09;
+ "be" -> 16#0a;
+ "byelorussian" -> 16#0a;
+ "bg" -> 16#0b;
+ "bulgarian" -> 16#0b;
+ "bh" -> 16#0c;
+ "bihari" -> 16#0c;
+ "bi" -> 16#0d;
+ "bislama" -> 16#0d;
+ "bn" -> 16#0e;
+ "bangla" -> 16#0e;
+ "bengali" -> 16#0e;
+ "bo" -> 16#0f;
+ "tibetan" -> 16#0f;
+ "br" -> 16#10;
+ "breton" -> 16#10;
+ "ca" -> 16#11;
+ "catalan" -> 16#11;
+ "co" -> 16#12;
+ "corsican" -> 16#12;
+ "cs" -> 16#13;
+ "czech" -> 16#13;
+ "cy" -> 16#14;
+ "welsh" -> 16#14;
+ "da" -> 16#15;
+ "danish" -> 16#15;
+ "de" -> 16#16;
+ "german" -> 16#16;
+ "dz" -> 16#17;
+ "bhutani" -> 16#17;
+ "el" -> 16#18;
+ "greek" -> 16#18;
+ "en" -> 16#19;
+ "english" -> 16#19;
+ "eo" -> 16#1a;
+ "esperanto" -> 16#1a;
+ "es" -> 16#1b;
+ "spanish" -> 16#1b;
+ "et" -> 16#1c;
+ "estonian" -> 16#1c;
+ "eu" -> 16#1d;
+ "basque" -> 16#1d;
+ "fa" -> 16#1e;
+ "persian" -> 16#1e;
+ "fi" -> 16#1f;
+ "finnish" -> 16#1f;
+ "fj" -> 16#20;
+ "fiji" -> 16#20;
+ "fo" -> 16#82;
+ "faeroese" -> 16#82;
+ "fr" -> 16#22;
+ "french" -> 16#22;
+ "fy" -> 16#83;
+ "frisian" -> 16#83;
+ "ga" -> 16#24;
+ "irish" -> 16#24;
+ "gd" -> 16#25;
+ "scots-gaelic" -> 16#25;
+ "gl" -> 16#26;
+ "galician" -> 16#26;
+ "gn" -> 16#27;
+ "guarani" -> 16#27;
+ "gu" -> 16#28;
+ "gujarati" -> 16#28;
+ "ha" -> 16#29;
+ "hausa" -> 16#29;
+ "he" -> 16#2a;
+ "hebrew" -> 16#2a;
+ "hi" -> 16#2b;
+ "hindi" -> 16#2b;
+ "hr" -> 16#2c;
+ "croatian" -> 16#2c;
+ "hu" -> 16#2d;
+ "hungarian" -> 16#2d;
+ "hy" -> 16#2e;
+ "armenian" -> 16#2e;
+ "ia" -> 16#84;
+ "interlingua" -> 16#84;
+ "id" -> 16#30;
+ "indonesian" -> 16#30;
+ "ie" -> 16#86;
+ "interlingue" -> 16#86;
+ "ik" -> 16#87;
+ "inupiak" -> 16#87;
+ "is" -> 16#33;
+ "icelandic" -> 16#33;
+ "it" -> 16#34;
+ "italian" -> 16#34;
+ "iu" -> 16#89;
+ "inuktitut" -> 16#89;
+ "ja" -> 16#36;
+ "japanese" -> 16#36;
+ "jw" -> 16#37;
+ "javanese" -> 16#37;
+ "ka" -> 16#38;
+ "georgian" -> 16#38;
+ "kk" -> 16#39;
+ "kazakh" -> 16#39;
+ "kl" -> 16#8a;
+ "greenlandic" -> 16#8a;
+ "km" -> 16#3b;
+ "cambodian" -> 16#3b;
+ "kn" -> 16#3c;
+ "kannada" -> 16#3c;
+ "ko" -> 16#3d;
+ "korean" -> 16#3d;
+ "ks" -> 16#3e;
+ "kashmiri" -> 16#3e;
+ "ku" -> 16#3f;
+ "kurdish" -> 16#3f;
+ "ky" -> 16#40;
+ "kirghiz" -> 16#40;
+ "la" -> 16#8b;
+ "latin" -> 16#8b;
+ "ln" -> 16#42;
+ "lingala" -> 16#42;
+ "lo" -> 16#43;
+ "laothian" -> 16#43;
+ "lt" -> 16#44;
+ "lithuanian" -> 16#44;
+ "lv" -> 16#45;
+ "lettish" -> 16#45;
+ "latvian" -> 16#45;
+ "mg" -> 16#46;
+ "malagese" -> 16#46;
+ "mi" -> 16#47;
+ "maori" -> 16#47;
+ "mk" -> 16#48;
+ "macedonian" -> 16#48;
+ "ml" -> 16#49;
+ "malayalam" -> 16#49;
+ "mn" -> 16#4a;
+ "mongolian" -> 16#4a;
+ "mo" -> 16#4b;
+ "moldavian" -> 16#4b;
+ "mr" -> 16#4c;
+ "marathi" -> 16#4c;
+ "ms" -> 16#4d;
+ "malay" -> 16#4d;
+ "mt" -> 16#4e;
+ "maltese" -> 16#4e;
+ "my" -> 16#4f;
+ "burmese" -> 16#4f;
+ "na" -> 16#81;
+ "nauru" -> 16#81;
+ "ne" -> 16#51;
+ "nepali" -> 16#51;
+ "nl" -> 16#52;
+ "dutch" -> 16#52;
+ "no" -> 16#53;
+ "norwegian" -> 16#53;
+ "oc" -> 16#54;
+ "occitan" -> 16#54;
+ "om" -> 16#55;
+ "oromo" -> 16#55;
+ "or" -> 16#56;
+ "oriya" -> 16#56;
+ "pa" -> 16#57;
+ "punjabi" -> 16#57;
+ "po" -> 16#58;
+ "polish" -> 16#58;
+ "ps" -> 16#59;
+ "pushto" -> 16#59;
+ "pt" -> 16#5a;
+ "portugese" -> 16#5a;
+ "qu" -> 16#5b;
+ "quechua" -> 16#5b;
+ "rm" -> 16#8c;
+ "rhaeto-romance" -> 16#8c;
+ "rn" -> 16#5d;
+ "kirundi" -> 16#5d;
+ "ro" -> 16#5e;
+ "romanian" -> 16#5e;
+ "ru" -> 16#5f;
+ "russian" -> 16#5f;
+ "rw" -> 16#60;
+ "kinyarwanda" -> 16#60;
+ "sa" -> 16#61;
+ "sanskrit" -> 16#61;
+ "sd" -> 16#62;
+ "sindhi" -> 16#62;
+ "sg" -> 16#63;
+ "sangho" -> 16#63;
+ "sh" -> 16#64;
+ "serbo-croatian" -> 16#64;
+ "si" -> 16#65;
+ "sinhalese" -> 16#65;
+ "sk" -> 16#66;
+ "slovak" -> 16#66;
+ "sl" -> 16#67;
+ "slovenian" -> 16#67;
+ "sm" -> 16#68;
+ "samoan" -> 16#68;
+ "sn" -> 16#69;
+ "shona" -> 16#69;
+ "so" -> 16#6a;
+ "somali" -> 16#6a;
+ "sq" -> 16#6b;
+ "albanian" -> 16#6b;
+ "sr" -> 16#6c;
+ "serbian" -> 16#6c;
+ "ss" -> 16#6d;
+ "siswati" -> 16#6d;
+ "st" -> 16#6e;
+ "seshoto" -> 16#6e;
+ "su" -> 16#6f;
+ "sundanese" -> 16#6f;
+ "sv" -> 16#70;
+ "swedish" -> 16#70;
+ "sw" -> 16#71;
+ "swahili" -> 16#71;
+ "ta" -> 16#72;
+ "tamil" -> 16#72;
+ "te" -> 16#73;
+ "telugu" -> 16#73;
+ "tg" -> 16#74;
+ "tajik" -> 16#74;
+ "th" -> 16#75;
+ "thai" -> 16#75;
+ "ti" -> 16#76;
+ "tigrinya" -> 16#76;
+ "tk" -> 16#77;
+ "turkmen" -> 16#77;
+ "tl" -> 16#78;
+ "tagalog" -> 16#78;
+ "tn" -> 16#79;
+ "setswana" -> 16#79;
+ "to" -> 16#7a;
+ "tonga" -> 16#7a;
+ "tr" -> 16#7b;
+ "turkish" -> 16#7b;
+ "ts" -> 16#7c;
+ "tsonga" -> 16#7c;
+ "tt" -> 16#7d;
+ "tatar" -> 16#7d;
+ "tw" -> 16#7e;
+ "twi" -> 16#7e;
+ "ug" -> 16#7f;
+ "uighur" -> 16#7f;
+ "uk" -> 16#50;
+ "ukrainian" -> 16#50;
+ "ur" -> 16#21;
+ "urdu" -> 16#21;
+ "uz" -> 16#23;
+ "uzbek" -> 16#23;
+ "vi" -> 16#2f;
+ "vietnamese" -> 16#2f;
+ "vo" -> 16#85;
+ "volapuk" -> 16#85;
+ "wo" -> 16#31;
+ "wolof" -> 16#31;
+ "xh" -> 16#32;
+ "xhosa" -> 16#32;
+ "yi" -> 16#88;
+ "yiddish" -> 16#88;
+ "yo" -> 16#35;
+ "yoruba" -> 16#35;
+ "za" -> 16#3a;
+ "zhuang" -> 16#3a;
+ "zh" -> 16#41;
+ "chinese" -> 16#41;
+ "zu" -> 16#5c;
+ "zulu" -> 16#5c
+ end.
+
+
+%% Push Application ID Assignments
+%%
+%% Assignments are found at http://www.wapforum.org/wina/push-app-id.htm
+%%
+decode_push_application({short,Data}) ->
+ decode_push_application(d_long(Data));
+
+decode_push_application(Code) when integer(Code) ->
+ case Code of
+ 16#00 -> "x-wap-application:*";
+ 16#01 -> "x-wap-application:push.sia";
+ 16#02 -> "x-wap-application:wml.ua";
+ 16#03 -> "x-wap-application:wta.ua";
+ 16#04 -> "x-wap-application:mms.ua";
+ 16#05 -> "x-wap-application:push.syncml";
+ 16#06 -> "x-wap-application:loc.ua";
+ 16#07 -> "x-wap-application:syncml.dm";
+ 16#08 -> "x-wap-application:drm.ua";
+ 16#09 -> "x-wap-application:emn.ua";
+ 16#0A -> "x-wap-application:wv.ua";
+ 16#8000 -> "x-wap-microsoft:localcontent.ua";
+ 16#8001 -> "x-wap-microsoft:IMclient.ua";
+ 16#8002 -> "x-wap-docomo:imode.mail.ua";
+ 16#8003 -> "x-wap-docomo:imode.mr.ua";
+ 16#8004 -> "x-wap-docomo:imode.mf.ua";
+ 16#8005 -> "x-motorola:location.ua";
+ 16#8006 -> "x-motorola:now.ua";
+ 16#8007 -> "x-motorola:otaprov.ua";
+ 16#8008 -> "x-motorola:browser.ua";
+ 16#8009 -> "x-motorola:splash.ua";
+ 16#800B -> "x-wap-nai:mvsw.command";
+ 16#8010 -> "x-wap-openwave:iota.ua"
+ end;
+decode_push_application(App) when list(App) ->
+ App.
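+%% Illustrative sketch:
+%%   decode_push_application(16#04)                       => "x-wap-application:mms.ua"
+%%   encode_push_application("x-wap-application:mms.ua")  => ?ENCODE_SHORT(16#04)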
+
+
+
+encode_push_application(App) ->
+ case App of
+ "x-wap-application:*" -> ?ENCODE_SHORT(16#00);
+ "x-wap-application:push.sia" -> ?ENCODE_SHORT(16#01);
+ "x-wap-application:wml.ua" -> ?ENCODE_SHORT(16#02);
+ "x-wap-application:wta.ua" -> ?ENCODE_SHORT(16#03);
+ "x-wap-application:mms.ua" -> ?ENCODE_SHORT(16#04);
+ "x-wap-application:push.syncml" -> ?ENCODE_SHORT(16#05);
+ "x-wap-application:loc.ua" -> ?ENCODE_SHORT(16#06);
+ "x-wap-application:syncml.dm" -> ?ENCODE_SHORT(16#07);
+ "x-wap-application:drm.ua" -> ?ENCODE_SHORT(16#08);
+ "x-wap-application:emn.ua" -> ?ENCODE_SHORT(16#09);
+ "x-wap-application:wv.ua" -> ?ENCODE_SHORT(16#0A);
+ "x-wap-microsoft:localcontent.ua" -> encode_integer(16#8000);
+ "x-wap-microsoft:IMclient.ua" -> encode_integer(16#8001);
+ "x-wap-docomo:imode.mail.ua" -> encode_integer(16#8002);
+ "x-wap-docomo:imode.mr.ua" -> encode_integer(16#8003);
+ "x-wap-docomo:imode.mf.ua" -> encode_integer(16#8004);
+ "x-motorola:location.ua" -> encode_integer(16#8005);
+ "x-motorola:now.ua" -> encode_integer(16#8006);
+ "x-motorola:otaprov.ua" -> encode_integer(16#8007);
+ "x-motorola:browser.ua" -> encode_integer(16#8008);
+ "x-motorola:splash.ua" -> encode_integer(16#8009);
+ "x-wap-nai:mvsw.command" -> encode_integer(16#800B);
+ "x-wap-openwave:iota.ua" -> encode_integer(16#8010);
+ _ -> encode_uri_value(App)
+ end.
+
+
+
+
+%% WSP 8.5 Multipart handling
+
+encode_multipart(Entries) ->
+ encode_multipart(Entries, ?WSP_DEFAULT_VERSION).
+
+encode_multipart([], _Version) ->
+ <<>>;
+encode_multipart(Entries, Version) ->
+ EncEntries = encode_multipart_entries(Entries, Version),
+ <<(e_uintvar(length(Entries)))/binary, EncEntries/binary >>.
+
+encode_multipart_entries(Entries, Version) ->
+ encode_multipart_entries(Entries, Version, []).
+
+encode_multipart_entries([], _Version, Acc) ->
+ list_to_binary(lists:reverse(Acc));
+encode_multipart_entries([Entry|T], Version, Acc) ->
+ EncEntry = encode_multipart_entry(Entry, Version),
+ encode_multipart_entries(T, Version, [EncEntry | Acc]).
+
+encode_multipart_entry(Entry, Version) ->
+ #wsp_multipart_entry { content_type = ContentType,
+ headers = Headers,
+ data = Data } = Entry,
+ EncContentType = encode_content_type(ContentType,Version),
+ EncHeaders = encode_headers(Headers, Version),
+ EncHeadersLength = e_uintvar(size(EncContentType)+size(EncHeaders)),
+ DataLen = e_uintvar(size(Data)),
+ <<EncHeadersLength/binary,
+ DataLen/binary,
+ EncContentType/binary,
+ EncHeaders/binary,
+ Data/binary>>.
+
+
+decode_multipart(Data) ->
+ decode_multipart(Data, ?WSP_DEFAULT_VERSION).
+
+decode_multipart(<<>>, _Version) ->
+ {[], <<>>};
+decode_multipart(Data, Version) ->
+ {Entries, Data1} = d_uintvar(Data),
+ decode_multipart_entries(Entries, Data1, Version).
+
+decode_multipart_entries(Entries, Data, Version) ->
+ decode_multipart_entries(Entries, Data, Version, []).
+
+decode_multipart_entries(0, Data, _Version, Acc) ->
+ {lists:reverse(Acc), Data};
+decode_multipart_entries(Entries, Data, Version, Acc) ->
+ {MultiPartEntry, Data1} = decode_multipart_entry(Data,Version),
+ decode_multipart_entries(Entries-1, Data1, Version, [MultiPartEntry|Acc]).
+
+decode_multipart_entry(Data, Version) ->
+ {HeadersLen, Data1} = d_uintvar(Data),
+ {DataLen, Data2} = d_uintvar(Data1),
+ {FieldData,Data3} = scan_header_data(Data2),
+ ContentType = decode_content_type(FieldData, Version),
+ BinHeadersLen = (HeadersLen-(size(Data2)-size(Data3))),
+ <<BinHeaders:BinHeadersLen/binary,Data4/binary>> = Data3,
+ Headers = decode_headers(BinHeaders, Version),
+ <<ValueData:DataLen/binary, Data5/binary>> = Data4,
+ {#wsp_multipart_entry{content_type=ContentType,
+ headers=Headers,
+ data=ValueData},Data5}.
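+
+%% Wire layout of one multipart entry, as produced by encode_multipart_entry/2
+%% and consumed by decode_multipart_entry/2:
+%%
+%%   HeadersLen (uintvar) | DataLen (uintvar) | ContentType | Headers | Data
+%%
+%% HeadersLen counts the ContentType and Headers octets together, which is why
+%% the decoder subtracts the already-consumed content-type octets
+%% (size(Data2)-size(Data3)) before extracting the header block.
+%% Illustrative sketch, assuming encode_headers([], Version) yields <<>> and
+%% the content type encodes to a single octet CT: an entry with no headers and
+%% Data = <<"hi">> serializes as <<1, 2, CT, "hi">>.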
+
+
+parse_credentials(Field, Value) ->
+ %% FIXME
+ ?WH(Field, Value, []).
+
+format_credentials("basic", [User,Password]) ->
+ ["Basic ", base64:encode(User++":"++Password)];
+format_credentials(Scheme, Params) ->
+ [Scheme, format_params(Params)].
+
+encode_credentials("basic", [User,Password], _Version) ->
+ e_value(?ENCODE_SHORT(0),
+ encode_text_string(User),
+ encode_text_string(Password));
+encode_credentials(Scheme, Params, Version) ->
+ e_value(encode_text_string(Scheme), encode_params(Params, Version)).
+
+decode_credentials(Field, Data, Version) ->
+ case scan_header_data(Data) of
+ {0, Data0} ->
+ {User,Data1} = d_text_string(Data0),
+ {Password,_Data2} = d_text_string(Data1),
+ ?WH(Field, "basic", [User,Password]);
+ {Scheme, Data0} when list(Scheme) ->
+ Params = decode_params(Data0, Version),
+ ?WH(Field, Scheme, Params)
+ end.
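+
+%% Sketch of the basic-credentials value built above, assuming ?ENCODE_SHORT(X)
+%% yields the short-integer octet <<1:1,X:7>>:
+%%
+%%   encode_credentials("basic", ["user","pass"], Version)
+%%     =:= e_value(<<16#80>>, <<"user",0>>, <<"pass",0>>)
+%%     =:= <<11, 16#80, "user", 0, "pass", 0>>
+%%
+%% i.e. a value-length octet (11), the short integer 0 tagging the Basic
+%% scheme, and two null-terminated text strings. decode_credentials/3 matches
+%% the tag 0 (as returned by scan_header_data/1, defined elsewhere in this
+%% module) and then reads the two text strings back.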
+
+%%
+%% Challenge: Basic Realm-value | Auth-Scheme Realm *Auth-Params
+%%
+
+parse_challenge(Field, Value) ->
+ %% FIXME
+ ?WH(Field, Value, []).
+
+format_challenge({"basic",Realm}, []) ->
+ ["Basic ", Realm];
+format_challenge({Scheme,Realm}, Params) ->
+ [Scheme," ",Realm, format_params(Params)].
+
+encode_challenge({"basic",Realm}, [], _Version) ->
+ e_value(?ENCODE_SHORT(0),
+ encode_text_string(Realm));
+encode_challenge({Scheme,Realm}, Params, Version) ->
+ e_value(encode_text_string(Scheme),
+ encode_text_string(Realm),
+ encode_params(Params, Version)).
+
+decode_challenge(Field, Data, Version) ->
+ case scan_header_data(Data) of
+ {0, Data0} ->
+ {Realm,_} = d_text_string(Data0),
+ ?WH(Field, {"basic", Realm}, []);
+ {Scheme, Data0} when list(Scheme) ->
+ {Realm,_} = d_text_string(Data0),
+ Params = decode_params(Data0, Version),
+ ?WH(Field, {Scheme,Realm}, Params)
+ end.
+
+
+parse_well_known_method(Value) ->
+ case Value of
+ "GET" -> 'GET';
+ "OPTIONS" -> 'OPTIONS';
+ "HEAD" -> 'HEAD';
+ "DELETE" -> 'DELETE';
+ "TRACE" -> 'TRACE';
+ "POST" -> 'POST';
+ "PUT" -> 'PUT'
+ end.
+
+encode_well_known_method(Value, _Version) ->
+ case Value of
+ 'GET' -> ?ENCODE_SHORT(16#40);
+ 'OPTIONS' -> ?ENCODE_SHORT(16#41);
+ 'HEAD' -> ?ENCODE_SHORT(16#42);
+ 'DELETE' -> ?ENCODE_SHORT(16#43);
+ 'TRACE' -> ?ENCODE_SHORT(16#44);
+ 'POST' -> ?ENCODE_SHORT(16#60);
+ 'PUT' -> ?ENCODE_SHORT(16#61)
+ end.
+
+decode_well_known_method(Value, _Version) ->
+ case Value of
+ 16#40 -> 'GET';
+ 16#41 -> 'OPTIONS';
+ 16#42 -> 'HEAD';
+ 16#43 -> 'DELETE';
+ 16#44 -> 'TRACE';
+ 16#60 -> 'POST';
+ 16#61 -> 'PUT'
+ end.
+
+
+
+%%
+%% WSP Table 36. Status Code Assignments
+%%
+
+encode_status_code(Status) ->
+ case Status of
+ 100 -> 16#10; %% 'Continue'
+ 101 -> 16#11; %% 'Switching Protocols'
+ 200 -> 16#20; %% 'OK, Success'
+ 201 -> 16#21; %% 'Created'
+ 202 -> 16#22; %% 'Accepted'
+ 203 -> 16#23; %% 'Non-Authoritative Information'
+ 204 -> 16#24; %% 'No Content'
+ 205 -> 16#25; %% 'Reset Content'
+ 206 -> 16#26; %% 'Partial Content'
+ 300 -> 16#30; %% 'Multiple Choices'
+ 301 -> 16#31; %% 'Moved Permanently'
+ 302 -> 16#32; %% 'Moved temporarily'
+ 303 -> 16#33; %% 'See Other'
+ 304 -> 16#34; %% 'Not modified'
+ 305 -> 16#35; %% 'Use Proxy'
+ 306 -> 16#36; %% '(reserved)'
+ 307 -> 16#37; %% 'Temporary Redirect'
+ 400 -> 16#40; %% 'Bad Request - server could not understand request'
+ 401 -> 16#41; %% 'Unauthorized'
+ 402 -> 16#42; %% 'Payment required'
+ 403 -> 16#43; %% 'Forbidden operation is understood but refused'
+ 404 -> 16#44; %% 'Not Found'
+ 405 -> 16#45; %% 'Method not allowed'
+ 406 -> 16#46; %% 'Not Acceptable'
+ 407 -> 16#47; %% 'Proxy Authentication required'
+ 408 -> 16#48; %% 'Request Timeout'
+ 409 -> 16#49; %% 'Conflict'
+ 410 -> 16#4A; %% 'Gone'
+ 411 -> 16#4B; %% 'Length Required'
+ 412 -> 16#4C; %% 'Precondition failed'
+ 413 -> 16#4D; %% 'Request entity too large'
+ 414 -> 16#4E; %% 'Request-URI too large'
+ 415 -> 16#4F; %% 'Unsupported media type'
+ 416 -> 16#50; %% 'Requested Range Not Satisfiable'
+ 417 -> 16#51; %% 'Expectation Failed'
+ 500 -> 16#60; %% 'Internal Server Error'
+ 501 -> 16#61; %% 'Not Implemented'
+ 502 -> 16#62; %% 'Bad Gateway'
+ 503 -> 16#63; %% 'Service Unavailable'
+ 504 -> 16#64; %% 'Gateway Timeout'
+ 505 -> 16#65 %% 'HTTP version not supported'
+ end.
+
+
+decode_status_code(StatusCode) ->
+ case StatusCode of
+ 16#10 -> 100; %% 'Continue'
+ 16#11 -> 101; %% 'Switching Protocols'
+ 16#20 -> 200; %% 'OK, Success'
+ 16#21 -> 201; %% 'Created'
+ 16#22 -> 202; %% 'Accepted'
+ 16#23 -> 203; %% 'Non-Authoritative Information'
+ 16#24 -> 204; %% 'No Content'
+ 16#25 -> 205; %% 'Reset Content'
+ 16#26 -> 206; %% 'Partial Content'
+ 16#30 -> 300; %% 'Multiple Choices'
+ 16#31 -> 301; %% 'Moved Permanently'
+ 16#32 -> 302; %% 'Moved temporarily'
+ 16#33 -> 303; %% 'See Other'
+ 16#34 -> 304; %% 'Not modified'
+ 16#35 -> 305; %% 'Use Proxy'
+ 16#36 -> 306; %% '(reserved)'
+ 16#37 -> 307; %% 'Temporary Redirect'
+ 16#40 -> 400; %% 'Bad Request - server could not understand request'
+ 16#41 -> 401; %% 'Unauthorized'
+ 16#42 -> 402; %% 'Payment required'
+ 16#43 -> 403; %% 'Forbidden operation is understood but refused'
+ 16#44 -> 404; %% 'Not Found'
+ 16#45 -> 405; %% 'Method not allowed'
+ 16#46 -> 406; %% 'Not Acceptable'
+ 16#47 -> 407; %% 'Proxy Authentication required'
+ 16#48 -> 408; %% 'Request Timeout'
+ 16#49 -> 409; %% 'Conflict'
+ 16#4A -> 410; %% 'Gone'
+ 16#4B -> 411; %% 'Length Required'
+ 16#4C -> 412; %% 'Precondition failed'
+ 16#4D -> 413; %% 'Request entity too large'
+ 16#4E -> 414; %% 'Request-URI too large'
+ 16#4F -> 415; %% 'Unsupported media type'
+ 16#50 -> 416; %% 'Requested Range Not Satisfiable'
+ 16#51 -> 417; %% 'Expectation Failed'
+ 16#60 -> 500; %% 'Internal Server Error'
+ 16#61 -> 501; %% 'Not Implemented'
+ 16#62 -> 502; %% 'Bad Gateway'
+ 16#63 -> 503; %% 'Service Unavailable'
+ 16#64 -> 504; %% 'Gateway Timeout'
+ 16#65 -> 505 %% 'HTTP version not supported'
+ end.
+
+
+%%
+%% Content Type Assignments
+%%
+%% Assignments are found at http://www.wapforum.org/wina/wsp-content-type.htm
+%%
+%%
+%% encode_well_known_media(ContentType, Version) -> Code
+%%
+encode_well_known_media(ContentType, Version) ->
+ case ContentType of
+ %% WSP_REGISTERED_CONTENT_TYPES
+ "application/vnd.uplanet.cacheop-wbxml" ->
+ encode_integer(16#0201);
+ "application/vnd.uplanet.signal" ->
+ encode_integer(16#0202);
+ "application/vnd.uplanet.alert-wbxml" ->
+ encode_integer(16#0203);
+ "application/vnd.uplanet.list-wbxml" ->
+ encode_integer(16#0204);
+ "application/vnd.uplanet.listcmd-wbxml" ->
+ encode_integer(16#0205);
+ "application/vnd.uplanet.channel-wbxml" ->
+ encode_integer(16#0206);
+ "application/vnd.uplanet.provisioning-status-uri" ->
+ encode_integer(16#0207);
+ "x-wap.multipart/vnd.uplanet.header-set" ->
+ encode_integer(16#0208);
+ "application/vnd.uplanet.bearer-choice-wbxml" ->
+ encode_integer(16#0209);
+ "application/vnd.phonecom.mmc-wbxml" ->
+ encode_integer(16#020A);
+ "application/vnd.nokia.syncset+wbxml" ->
+ encode_integer(16#020B);
+ "image/x-up-wpng" ->
+ encode_integer(16#020C);
+ _ ->
+ encode_constrained_media(ContentType, Version)
+ end.
+
+
+encode_constrained_media(ContentType, Version) ->
+ case ContentType of
+ "*/*" -> ?ENCODE_SHORT(16#00);
+ "text/*" -> ?ENCODE_SHORT(16#01);
+ "text/html" -> ?ENCODE_SHORT(16#02);
+ "text/plain" -> ?ENCODE_SHORT(16#03);
+ "text/x-hdml" -> ?ENCODE_SHORT(16#04);
+ "text/x-ttml" -> ?ENCODE_SHORT(16#05);
+ "text/x-vcalendar" -> ?ENCODE_SHORT(16#06);
+ "text/x-vcard" -> ?ENCODE_SHORT(16#07);
+ "text/vnd.wap.wml" -> ?ENCODE_SHORT(16#08);
+ "text/vnd.wap.wmlscript" -> ?ENCODE_SHORT(16#09);
+ "text/vnd.wap.wta-event" -> ?ENCODE_SHORT(16#0A);
+ "multipart/*" -> ?ENCODE_SHORT(16#0B);
+ "multipart/mixed" -> ?ENCODE_SHORT(16#0C);
+ "multipart/form-data" -> ?ENCODE_SHORT(16#0D);
+	"multipart/byteranges" -> ?ENCODE_SHORT(16#0E);
+ "multipart/alternative" -> ?ENCODE_SHORT(16#0F);
+ "application/*" -> ?ENCODE_SHORT(16#10);
+ "application/java-vm" -> ?ENCODE_SHORT(16#11);
+ "application/x-www-form-urlencoded" -> ?ENCODE_SHORT(16#12);
+ "application/x-hdmlc" -> ?ENCODE_SHORT(16#13);
+ "application/vnd.wap.wmlc" -> ?ENCODE_SHORT(16#14);
+ "application/vnd.wap.wmlscriptc" -> ?ENCODE_SHORT(16#15);
+ "application/vnd.wap.wta-eventc" -> ?ENCODE_SHORT(16#16);
+ "application/vnd.wap.uaprof" -> ?ENCODE_SHORT(16#17);
+ "application/vnd.wap.wtls-ca-certificate" -> ?ENCODE_SHORT(16#18);
+ "application/vnd.wap.wtls-user-certificate" -> ?ENCODE_SHORT(16#19);
+ "application/x-x509-ca-cert" -> ?ENCODE_SHORT(16#1A);
+ "application/x-x509-user-cert" -> ?ENCODE_SHORT(16#1B);
+ "image/*" -> ?ENCODE_SHORT(16#1C);
+ "image/gif" -> ?ENCODE_SHORT(16#1D);
+ "image/jpeg" -> ?ENCODE_SHORT(16#1E);
+ "image/tiff" -> ?ENCODE_SHORT(16#1F);
+ "image/png" -> ?ENCODE_SHORT(16#20);
+ "image/vnd.wap.wbmp" -> ?ENCODE_SHORT(16#21);
+ "application/vnd.wap.multipart.*" -> ?ENCODE_SHORT(16#22);
+ "application/vnd.wap.multipart.mixed" -> ?ENCODE_SHORT(16#23);
+ "application/vnd.wap.multipart.form-data" -> ?ENCODE_SHORT(16#24);
+ "application/vnd.wap.multipart.byteranges" -> ?ENCODE_SHORT(16#25);
+ "application/vnd.wap.multipart.alternative" -> ?ENCODE_SHORT(16#26);
+ "application/xml" -> ?ENCODE_SHORT(16#27);
+ "text/xml" -> ?ENCODE_SHORT(16#28);
+ "application/vnd.wap.wbxml" -> ?ENCODE_SHORT(16#29);
+ "application/x-x968-cross-cert" -> ?ENCODE_SHORT(16#2A);
+ "application/x-x968-ca-cert" -> ?ENCODE_SHORT(16#2B);
+ "application/x-x968-user-cert" -> ?ENCODE_SHORT(16#2C);
+
+ %% WAP Version 1.2
+ "text/vnd.wap.si" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#2D);
+ "application/vnd.wap.sic" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#2E);
+ "text/vnd.wap.sl" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#2F);
+ "application/vnd.wap.slc" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#30);
+ "text/vnd.wap.co" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#31);
+ "application/vnd.wap.coc" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#32);
+ "application/vnd.wap.multipart.related" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#33);
+ "application/vnd.wap.sia" when Version >= ?WSP_12 ->
+ ?ENCODE_SHORT(16#34);
+ %% WAP Version 1.3
+ "text/vnd.wap.connectivity-xml" when Version >= ?WSP_13 ->
+ ?ENCODE_SHORT(16#35);
+ "application/vnd.wap.connectivity-wbxml" when Version >= ?WSP_13 ->
+ ?ENCODE_SHORT(16#36);
+ %% WAP Version 1.4
+ "application/pkcs7-mime" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#37);
+ "application/vnd.wap.hashed-certificate" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#38);
+ "application/vnd.wap.signed-certificate" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#39);
+ "application/vnd.wap.cert-response" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#3A);
+ "application/xhtml+xml" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#3B);
+ "application/wml+xml" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#3C);
+ "text/css" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#3D);
+ "application/vnd.wap.mms-message" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#3E);
+ "application/vnd.wap.rollover-certificate" when Version >= ?WSP_14 ->
+ ?ENCODE_SHORT(16#3F);
+ %% WAP Version 1.5
+ "application/vnd.wap.locc+wbxml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#40);
+ "application/vnd.wap.loc+xml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#41);
+ "application/vnd.syncml.dm+wbxml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#42);
+ "application/vnd.syncml.dm+xml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#43);
+ "application/vnd.syncml.notification" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#44);
+ "application/vnd.wap.xhtml+xml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#45);
+ "application/vnd.wv.csp.cir" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#46);
+ "application/vnd.oma.dd+xml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#47);
+ "application/vnd.oma.drm.message" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#48);
+ "application/vnd.oma.drm.content" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#49);
+ "application/vnd.oma.drm.rights+xml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#4A);
+ "application/vnd.oma.drm.rights+wbxml" when Version >= ?WSP_15 ->
+ ?ENCODE_SHORT(16#4B);
+ _ ->
+ encode_text_string(ContentType)
+ end.
+
+
+decode_well_known_media(Code, Version) when integer(Code) ->
+ case Code of
+ %% WSP_REGISTERED_CONTENT_TYPES
+ 16#0201 -> "application/vnd.uplanet.cacheop-wbxml";
+ 16#0202 -> "application/vnd.uplanet.signal";
+ 16#0203 -> "application/vnd.uplanet.alert-wbxml";
+ 16#0204 -> "application/vnd.uplanet.list-wbxml";
+ 16#0205 -> "application/vnd.uplanet.listcmd-wbxml";
+ 16#0206 -> "application/vnd.uplanet.channel-wbxml";
+ 16#0207 -> "application/vnd.uplanet.provisioning-status-uri";
+ 16#0208 -> "x-wap.multipart/vnd.uplanet.header-set";
+ 16#0209 -> "application/vnd.uplanet.bearer-choice-wbxml";
+ 16#020A -> "application/vnd.phonecom.mmc-wbxml";
+ 16#020B -> "application/vnd.nokia.syncset+wbxml";
+ 16#020C -> "image/x-up-wpng";
+ _ -> decode_constrained_media(Code, Version)
+ end;
+decode_well_known_media(Media, _Version) when list(Media) ->
+ Media;
+decode_well_known_media({short,_Data}, Version) ->
+ decode_well_known_media(d_long(data), Version). %% BUG HERE: Data
+
+
+decode_constrained_media(Code, _Version) when integer(Code) ->
+ case Code of
+ 16#00 -> "*/*";
+ 16#01 -> "text/*";
+ 16#02 -> "text/html";
+ 16#03 -> "text/plain";
+ 16#04 -> "text/x-hdml";
+ 16#05 -> "text/x-ttml";
+ 16#06 -> "text/x-vcalendar";
+ 16#07 -> "text/x-vcard";
+ 16#08 -> "text/vnd.wap.wml";
+ 16#09 -> "text/vnd.wap.wmlscript";
+ 16#0A -> "text/vnd.wap.wta-event";
+ 16#0B -> "multipart/*";
+ 16#0C -> "multipart/mixed";
+ 16#0D -> "multipart/form-data";
+	16#0E -> "multipart/byteranges";
+ 16#0F -> "multipart/alternative";
+ 16#10 -> "application/*";
+ 16#11 -> "application/java-vm";
+ 16#12 -> "application/x-www-form-urlencoded";
+ 16#13 -> "application/x-hdmlc";
+ 16#14 -> "application/vnd.wap.wmlc";
+ 16#15 -> "application/vnd.wap.wmlscriptc";
+ 16#16 -> "application/vnd.wap.wta-eventc";
+ 16#17 -> "application/vnd.wap.uaprof";
+ 16#18 -> "application/vnd.wap.wtls-ca-certificate";
+ 16#19 -> "application/vnd.wap.wtls-user-certificate";
+ 16#1A -> "application/x-x509-ca-cert";
+ 16#1B -> "application/x-x509-user-cert";
+ 16#1C -> "image/*";
+ 16#1D -> "image/gif";
+ 16#1E -> "image/jpeg";
+ 16#1F -> "image/tiff";
+ 16#20 -> "image/png";
+ 16#21 -> "image/vnd.wap.wbmp";
+ 16#22 -> "application/vnd.wap.multipart.*";
+ 16#23 -> "application/vnd.wap.multipart.mixed";
+ 16#24 -> "application/vnd.wap.multipart.form-data";
+ 16#25 -> "application/vnd.wap.multipart.byteranges";
+ 16#26 -> "application/vnd.wap.multipart.alternative";
+ 16#27 -> "application/xml";
+ 16#28 -> "text/xml";
+ 16#29 -> "application/vnd.wap.wbxml";
+ 16#2A -> "application/x-x968-cross-cert";
+ 16#2B -> "application/x-x968-ca-cert";
+ 16#2C -> "application/x-x968-user-cert";
+ %% WAP Version 1.2
+ 16#2D -> "text/vnd.wap.si";
+ 16#2E -> "application/vnd.wap.sic";
+ 16#2F -> "text/vnd.wap.sl";
+ 16#30 -> "application/vnd.wap.slc";
+ 16#31 -> "text/vnd.wap.co";
+ 16#32 -> "application/vnd.wap.coc";
+ 16#33 -> "application/vnd.wap.multipart.related";
+ 16#34 -> "application/vnd.wap.sia";
+ %% WAP Version 1.3
+ 16#35 -> "text/vnd.wap.connectivity-xml";
+ 16#36 -> "application/vnd.wap.connectivity-wbxml";
+ %% WAP Version 1.4
+ 16#37 -> "application/pkcs7-mime";
+ 16#38 -> "application/vnd.wap.hashed-certificate";
+ 16#39 -> "application/vnd.wap.signed-certificate";
+ 16#3A -> "application/vnd.wap.cert-response";
+ 16#3B -> "application/xhtml+xml";
+ 16#3C -> "application/wml+xml";
+ 16#3D -> "text/css";
+ 16#3E -> "application/vnd.wap.mms-message";
+ 16#3F -> "application/vnd.wap.rollover-certificate";
+ %% WAP Version 1.5
+ 16#40 -> "application/vnd.wap.locc+wbxml";
+ 16#41 -> "application/vnd.wap.loc+xml";
+ 16#42 -> "application/vnd.syncml.dm+wbxml";
+ 16#43 -> "application/vnd.syncml.dm+xml";
+ 16#44 -> "application/vnd.syncml.notification";
+ 16#45 -> "application/vnd.wap.xhtml+xml";
+ 16#46 -> "application/vnd.wv.csp.cir";
+ 16#47 -> "application/vnd.oma.dd+xml";
+ 16#48 -> "application/vnd.oma.drm.message";
+ 16#49 -> "application/vnd.oma.drm.content";
+ 16#4A -> "application/vnd.oma.drm.rights+xml";
+ 16#4B -> "application/vnd.oma.drm.rights+wbxml"
+ end;
+decode_constrained_media(Media, _Version) when list(Media) ->
+ Media.
+
+
+%% Parse <integer> or <integer>.<integer>
+
+parse_version(Value) ->
+ case string:tokens(Value, ".") of
+ [Major,Minor] ->
+ {list_to_integer(Major), list_to_integer(Minor)};
+ [Major] ->
+ case catch list_to_integer(Major) of
+ {'EXIT', _} ->
+ Value;
+ V -> V
+ end
+ end.
+
+format_version({Major,Minor}) ->
+ [integer_to_list(Major),".",integer_to_list(Minor)];
+format_version(Major) when integer(Major) ->
+ integer_to_list(Major);
+format_version(Version) when list(Version) ->
+ Version.
+
+encode_version({Major,Minor}) ->
+ Ver = (((Major-1) band 16#7) bsl 4) bor (Minor band 16#f),
+ ?ENCODE_SHORT(Ver);
+encode_version(Major) when integer(Major) ->
+ Ver = ((Major band 16#7) bsl 4) bor 16#f,
+ ?ENCODE_SHORT(Ver);
+encode_version(Value) when list(Value) ->
+ encode_text_string(Value).
+
+
+decode_version(Value) when integer(Value) ->
+ Major = (Value bsr 4) band 16#7,
+ Minor = Value band 16#f,
+ if Minor == 16#f ->
+ Major;
+ true ->
+ {Major+1,Minor}
+ end;
+decode_version(Value) when list(Value) ->
+ Value.
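+
+%% Worked example of the nibble packing above, assuming ?ENCODE_SHORT(X)
+%% produces the short-integer octet <<1:1,X:7>> (i.e. 16#80 bor X):
+%%
+%%   encode_version({2,3}) packs (((2-1) band 7) bsl 4) bor 3 = 16#13,
+%%   i.e. the octet 16#93 on the wire, and decode_version(16#13) =:= {2,3}.
+%%
+%% The -1/+1 offset on the major number is symmetric between encode_version/1
+%% and decode_version/1, so the two functions round-trip each other.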
+
+
+encode_mms_version({Major,Minor}) ->
+ Ver = ((Major band 16#7) bsl 4) bor (Minor band 16#f),
+ ?ENCODE_SHORT(Ver);
+encode_mms_version(Major) when integer(Major) ->
+ Ver = ((Major band 16#7) bsl 4) bor 16#f,
+ ?ENCODE_SHORT(Ver);
+encode_mms_version(Value) when list(Value) ->
+ encode_text_string(Value).
+
+
+decode_mms_version(Value) when integer(Value) ->
+ Major = (Value bsr 4) band 16#7,
+ Minor = Value band 16#f,
+ if Minor == 16#f ->
+ Major;
+ true ->
+ {Major,Minor}
+ end;
+decode_mms_version(Value) when list(Value) ->
+ Value.
+
+
+%%%
+%%% Basic data types
+%%%
+
+e_delta_seconds(Value) ->
+ encode_integer(Value).
+
+
+encode_integer(I) when integer(I), I >= 0 , I < 127 ->
+ ?ENCODE_SHORT(I);
+encode_integer(I) when integer(I) ->
+ encode_long_integer(I);
+encode_integer(List) when list(List) ->
+ encode_integer(list_to_integer(List)).
+
+decode_integer(Value) when integer(Value) ->
+ Value;
+decode_integer({short,Data}) ->
+ Sz = size(Data)*8,
+ <<Value:Sz>> = Data,
+ Value.
+
+encode_short_integer(I) ->
+ ?ENCODE_SHORT(I).
+
+encode_long_integer(I) when I >= 0 ->
+ MOInt = encode_multioctet_integer(I, []),
+ MOIntLen = length(MOInt),
+ list_to_binary([MOIntLen band 16#1f | MOInt]).
+
+encode_multioctet_integer(I,Acc) when I < 256 ->
+ [I | Acc];
+encode_multioctet_integer(I,Acc) ->
+ encode_multioctet_integer(I bsr 8, [(I band 16#ff) | Acc]).
+
+
+%% Integer-Value: Short-Integer | Long-Integer
+%% Short-Integer: <<1:1, Short:7>>
+%% Long-Integer:  <<0:3, Len:5, Value:Len/unit:8>> with Len in 1..30
+%% return {Integer,Tail}
+d_integer_value(<<1:1,Integer:7,Tail/binary>>) ->
+ {Integer, Tail};
+d_integer_value(<<0:3,Len:5,Data/binary>>) when Len =/= 31 ->
+ Sz = Len*8,
+ <<Integer:Sz, Tail/binary>> = Data,
+ {Integer, Tail}.
+
+decode_short_integer(<<1:1,Septet:7,T100/binary>>) ->
+ {Septet, T100}.
+
+decode_long_integer(<<0:3,Len:5,Data/binary>>) when Len =/= 31 ->
+ Sz = Len*8,
+ <<Val:Sz, Tail/binary>> = Data,
+ {Val, Tail}.
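+
+%% Worked example of the integer encodings above: encode_integer(300) falls
+%% through to encode_long_integer(300), giving <<2,16#01,16#2C>> (a length
+%% octet followed by the big-endian value octets), and both
+%% decode_long_integer(<<2,16#01,16#2C>>) and d_integer_value(<<2,16#01,16#2C>>)
+%% return {300, <<>>}. Values in 0..126 instead use the one-octet
+%% short-integer form via ?ENCODE_SHORT/1.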
+
+d_long(Data) ->
+ Sz = size(Data)*8,
+ <<Value:Sz>> = Data,
+ Value.
+
+
+encode_uri_value(Data) ->
+ encode_text_string(Data).
+
+decode_uri_value(Data) when list(Data) ->
+ Data.
+
+%% parse quoted string
+decode_quoted_string([$" | List]) ->
+ List.
+
+encode_quoted_string([$" | Value]) ->
+ case lists:reverse(Value) of
+ [$" | Value1] ->
+ <<$", (list_to_binary(lists:reverse(Value1)))/binary, 0>>;
+ _ ->
+ <<$", (list_to_binary(Value))/binary, 0>>
+ end;
+encode_quoted_string(Value) ->
+ <<$", (list_to_binary(Value))/binary, 0>>.
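+
+%% Example: encode_quoted_string("\"hi\"") =:= <<$", "hi", 0>> (the opening
+%% quote is kept, a trailing quote is dropped), while decode_quoted_string/1
+%% only strips the leading quote: decode_quoted_string("\"hi") =:= "hi".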
+
+
+
+decode_text_string(List) when list(List) ->
+ List;
+decode_text_string(Bin) when binary(Bin) ->
+ binary_to_list(Bin).
+
+
+
+encode_text_string(A) when atom(A) ->
+ encode_text_string(atom_to_list(A));
+encode_text_string([H|T]) when H >= 128 ->
+ <<(list_to_binary([127,H|T]))/binary,0>>;
+encode_text_string(S) ->
+ <<(list_to_binary(S))/binary,0>>.
+
+
+encode_text_value(undefined) ->
+ <<0>>;
+encode_text_value([$"|T]) ->
+ %% remove ending quote ?
+ <<34,(list_to_binary(T))/binary>>;
+encode_text_value(L) ->
+ encode_text_string(L).
+
+
+d_text_value(<<0,T100/binary>>) ->
+ { "", T100};
+d_text_value(<<34,_Tail/binary>>=Data) ->
+ d_text_string(Data);
+d_text_value(Data) ->
+ d_text_string(Data).
+
+
+d_text_string(<<127,Data/binary>>) -> %% Remove quote
+ d_text_string(Data,[]);
+d_text_string(Data) ->
+ d_text_string(Data,[]).
+
+d_text_string(<<0,Tail/binary>>,A) ->
+ {lists:reverse(A), Tail};
+d_text_string(<<C,Tail/binary>>,A) ->
+ d_text_string(Tail,[C|A]);
+d_text_string(<<>>, A) ->
+ {lists:reverse(A), <<>>}.
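+
+%% Example of the null-terminated text-string form used above:
+%%   encode_text_string("abc") =:= <<"abc",0>>
+%%   d_text_string(<<"abc",0,"rest">>) =:= {"abc", <<"rest">>}
+%% Strings whose first octet is >= 128 are prefixed with the quote octet 127
+%% on encode, and d_text_string/1 strips that prefix again on decode.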
+
+
+d_q_value(<<0:1,Q:7,Tail/binary>>) ->
+ QVal =
+ if Q >= 1, Q =< 100 ->
+ lists:flatten(io_lib:format("0.~2..0w", [Q-1]));
+ Q >= 101, Q =< 1099 ->
+ lists:flatten(io_lib:format("0.~3..0w", [Q-100]));
+ true ->
+	    io:format("Q-value too big ~w\n", [Q]),
+ "***"
+ end,
+ {QVal, Tail};
+d_q_value(<<1:1,Q1:7,0:1,Q0:7,Tail/binary>>) ->
+ Q = (Q1 bsl 7) bor Q0,
+ QVal =
+ if Q >= 1, Q =< 100 ->
+ lists:flatten(io_lib:format("0.~2..0w", [Q-1]));
+ Q >= 101, Q =< 1099 ->
+ lists:flatten(io_lib:format("0.~3..0w", [Q-100]));
+ true ->
+	    io:format("Q-value too big ~w\n", [Q]),
+ "***"
+ end,
+ {QVal, Tail}.
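+
+%% Worked examples for the Q-value decoding above: the single octet 34 decodes
+%% as d_q_value(<<34>>) =:= {"0.33", <<>>} (two-digit form, Q-1), while the
+%% two-octet pair <<16#83,16#31>> carries Q = (3 bsl 7) bor 49 = 433 and
+%% decodes as {"0.333", <<>>} (three-digit form, Q-100).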
+
+
+%%
+%% Decode uintvar
+%%
+d_uintvar(<<0:1,S0:7,T100/binary>>) ->
+ {S0, T100};
+d_uintvar(<<1:1,S1:7,0:1,S0:7,T100/binary>>) ->
+ {(S1 bsl 7) bor S0, T100};
+d_uintvar(<<1:1,S2:7,1:1,S1:7,0:1,S0:7,T100/binary>>) ->
+ {(S2 bsl 14) bor (S1 bsl 7) bor S0, T100};
+d_uintvar(<<1:1,S3:7,1:1,S2:7,1:1,S1:7,0:1,S0:7,T100/binary>>) ->
+ {(S3 bsl 21) bor (S2 bsl 14) bor (S1 bsl 7) bor S0, T100};
+d_uintvar(<<1:1,S4:7,1:1,S3:7,1:1,S2:7,1:1,S1:7,0:1,S0:7,T100/binary>>) ->
+ {(S4 bsl 28) bor (S3 bsl 21) bor (S2 bsl 14) bor (S1 bsl 7) bor S0, T100}.
+
+
+e_uintvar(I) when I < 128 -> <<I>>;
+e_uintvar(I) -> e_uintvar(I,[]).
+
+e_uintvar(0,Acc) ->
+ list_to_binary(Acc);
+e_uintvar(I,[]) ->
+ e_uintvar(I bsr 7, [I band 16#7f]);
+e_uintvar(I,Acc) ->
+ e_uintvar(I bsr 7, [16#80 bor (I band 16#7f) | Acc]).
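+
+%% Worked example of the uintvar form (7 data bits per octet, high bit set on
+%% all but the last octet):
+%%   e_uintvar(16#87A5) =:= <<16#82,16#8F,16#25>>
+%%   d_uintvar(<<16#82,16#8F,16#25>>) =:= {16#87A5, <<>>}
+%% since 16#87A5 = (2 bsl 14) bor (16#0F bsl 7) bor 16#25.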
+
+
+e_value(B) ->
+ Sz = size(B),
+ if Sz =< 30 ->
+ <<Sz:8, B/binary>>;
+ true ->
+ <<31:8, (e_uintvar(Sz))/binary, B/binary >>
+ end.
+
+e_value(B1,B2) ->
+ Sz = size(B1)+size(B2),
+ if Sz =< 30 ->
+ <<Sz:8, B1/binary, B2/binary>>;
+ true ->
+ <<31:8, (e_uintvar(Sz))/binary, B1/binary, B2/binary >>
+ end.
+
+e_value(B1,B2,B3) ->
+ Sz = size(B1)+size(B2)+size(B3),
+ if Sz =< 30 ->
+ <<Sz:8, B1/binary,B2/binary,B3/binary>>;
+ true ->
+ <<31:8,(e_uintvar(Sz))/binary,B1/binary,B2/binary,B3/binary>>
+ end.
+
+e_value(B1,B2,B3,B4) ->
+ Sz = size(B1)+size(B2)+size(B3)+size(B4),
+ if Sz =< 30 ->
+ <<Sz:8, B1/binary,B2/binary,B3/binary,B4/binary>>;
+ true ->
+ <<31:8,(e_uintvar(Sz))/binary,B1/binary,
+ B2/binary,B3/binary,B4/binary>>
+ end.
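+
+%% e_value/1..4 prepend a value-length header: payloads of up to 30 octets get
+%% a single length octet, longer ones get the octet 31 followed by a uintvar
+%% length. For example, e_value(<<1,2,3>>) =:= <<3,1,2,3>>.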
+
+%%
+%% Extended methods
+%%
+decode_extended_methods(<<PduType:8, Data/binary>>) ->
+ Type = decode_pdu_type(PduType),
+ {Method, Data1} = d_text_string(Data),
+ [{Type,Method} | decode_extended_methods(Data1)];
+decode_extended_methods(<<>>) ->
+ [].
+
+encode_extended_methods(Ms) ->
+ list_to_binary(encode_ext_methods(Ms)).
+
+encode_ext_methods([{Type,Method} | T]) ->
+ [ encode_pdu_type(Type), encode_text_string(Method) |
+ encode_ext_methods(T)];
+encode_ext_methods([]) ->
+ [].
+
+%%
+%% Address lists used by redirect-pdu and aliases-capability
+%%
+decode_address(D0) ->
+ [A] = decode_addresses(D0),
+ A.
+
+decode_addresses(D0) ->
+ case D0 of
+ <<1:1, 1:1,Len:6,B:8,P:16,Addr:Len/binary,D1/binary>> ->
+ [#wdp_address { bearer = B, address = Addr, portnum=P } |
+ decode_addresses(D1)];
+ <<1:1, 0:1,Len:6,B:8,Addr:Len/binary,D1/binary>> ->
+ [#wdp_address { bearer = B, address = Addr } |
+ decode_addresses(D1)];
+ <<0:1, 1:1,Len:6,P:16,Addr:Len/binary,D1/binary>> ->
+ [#wdp_address { portnum=P, address=Addr } |
+ decode_addresses(D1)];
+ <<0:1, 0:1,Len:6,Addr:Len/binary,D1/binary>> ->
+ [#wdp_address { address=Addr } |
+ decode_addresses(D1)];
+ <<>> ->
+ []
+ end.
+
+encode_addresses(As) ->
+ encode_addresses(As, []).
+
+encode_addresses([A|As], Acc) ->
+ encode_addresses(As, [encode_address(A)|Acc]);
+encode_addresses([], Acc) ->
+ list_to_binary(lists:reverse(Acc)).
+
+encode_address(#wdp_address { bearer = B, address = Addr, portnum = P }) ->
+ BAddr = if tuple(Addr) ->
+ list_to_binary(inet:ip_to_bytes(Addr));
+ binary(Addr) ->
+ Addr
+ end,
+ Len = size(BAddr),
+ if B == undefined, P == undefined ->
+ <<0:1, 0:1, Len:6, BAddr/binary>>;
+ B == undefined ->
+ <<0:1, 1:1, Len:6, P:16, BAddr/binary>>;
+ P == undefined ->
+ <<1:1, 0:1, Len:6, B:8, BAddr/binary>>;
+ true ->
+ <<1:1, 1:1, Len:6, B:8, P:16, BAddr/binary>>
+ end.
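+
+%% Address octet layout used above: one flag bit each for "bearer included"
+%% and "port included", a 6-bit address length, then the optional bearer
+%% octet, the optional 16-bit port number, and the address octets. For
+%% example, <<0:1,1:1,4:6,9200:16,127,0,0,1>> decodes to a #wdp_address{}
+%% with portnum = 9200 and address = <<127,0,0,1>>, the bearer field being
+%% left at its record default.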
+
+
+
+
+-define(UNIX_TIME_OFFSET, 62167219200).
+
+d_date(Val) when integer(Val) ->
+ calendar:gregorian_seconds_to_datetime(Val+?UNIX_TIME_OFFSET);
+d_date({short,Data}) ->
+ Sz = size(Data)*8,
+ <<Sec:Sz>> = Data,
+ calendar:gregorian_seconds_to_datetime(Sec+?UNIX_TIME_OFFSET).
+
+e_date(DateTime) ->
+ Sec = calendar:datetime_to_gregorian_seconds(DateTime),
+ encode_long_integer(Sec - ?UNIX_TIME_OFFSET).
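+
+%% The offset 62167219200 is the number of Gregorian seconds from year 0 up to
+%% the Unix epoch, so dates go on the wire as seconds since 1970-01-01:
+%%   e_date({{1970,1,1},{0,0,0}}) =:= encode_long_integer(0) =:= <<1,0>>
+%%   d_date(0) =:= {{1970,1,1},{0,0,0}}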
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode http-date (RFC 2068). (MUST be sent in RFC 1123 date format)
+%% HTTP-date = rfc1123-date | rfc850-date | asctime-date
+%% rfc1123-date = wkday "," SP date1 SP time SP "GMT"
+%% rfc850-date = weekday "," SP date2 SP time SP "GMT"
+%% asctime-date = wkday SP date3 SP time SP 4DIGIT
+%%
+%% date1 = 2DIGIT SP month SP 4DIGIT
+%% ; day month year (e.g., 02 Jun 1982)
+%% date2 = 2DIGIT "-" month "-" 2DIGIT
+%% ; day-month-year (e.g., 02-Jun-82)
+%% date3 = month SP ( 2DIGIT | ( SP 1DIGIT ))
+%% ; month day (e.g., Jun 2)
+%%
+%% time = 2DIGIT ":" 2DIGIT ":" 2DIGIT
+%% ; 00:00:00 - 23:59:59
+%%
+%% wkday = "Mon" | "Tue" | "Wed"
+%% | "Thu" | "Fri" | "Sat" | "Sun"
+%%
+%%
+%% weekday = "Monday" | "Tuesday" | "Wednesday"
+%% | "Thursday" | "Friday" | "Saturday" | "Sunday"
+%%
+%% month = "Jan" | "Feb" | "Mar" | "Apr"
+%% | "May" | "Jun" | "Jul" | "Aug"
+%% | "Sep" | "Oct" | "Nov" | "Dec"
+%%
+%% decode date or crash!
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+parse_http_date(Date) ->
+ parse_hdate(tolower(Date)).
+
+parse_hdate([$m,$o,$n,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$t,$u,$e,$s,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$w,$e,$d,$n,$e,$s,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$t,$h,$u,$r,$s,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$f,$r,$i,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$s,$a,$t,$u,$r,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$s,$u,$n,$d,$a,$y,$ | Cs]) -> date2(Cs);
+parse_hdate([$m,$o,$n,X | Cs]) -> date13(X,Cs);
+parse_hdate([$t,$u,$e,X | Cs]) -> date13(X,Cs);
+parse_hdate([$w,$e,$d,X | Cs]) -> date13(X,Cs);
+parse_hdate([$t,$h,$u,X | Cs]) -> date13(X,Cs);
+parse_hdate([$f,$r,$i,X | Cs]) -> date13(X,Cs);
+parse_hdate([$s,$a,$t,X | Cs]) -> date13(X,Cs);
+parse_hdate([$s,$u,$n,X | Cs]) -> date13(X,Cs).
+
+date13($ , Cs) -> date3(Cs);
+date13($,, [$ |Cs]) -> date1(Cs).
+
+%% date1
+date1([D1,D2,$ ,M1,M2,M3,$ ,Y1,Y2,Y3,Y4,$ | Cs]) ->
+ M = parse_month([M1,M2,M3]),
+ D = list_to_integer([D1,D2]),
+ Y = list_to_integer([Y1,Y2,Y3,Y4]),
+ {Time,[$ ,$g,$m,$t|Cs1]} = parse_time(Cs),
+ { {{Y,M,D},Time}, Cs1}.
+
+%% date2
+date2([D1,D2,$-,M1,M2,M3,$-,Y1,Y2 | Cs]) ->
+ M = parse_month([M1,M2,M3]),
+ D = list_to_integer([D1,D2]),
+ Y = 1900 + list_to_integer([Y1,Y2]),
+ {Time, [$ ,$g,$m,$t|Cs1]} = parse_time(Cs),
+ {{{Y,M,D}, Time}, Cs1}.
+
+%% date3
+date3([M1,M2,M3,$ ,D1,D2,$ | Cs]) ->
+ M = parse_month([M1,M2,M3]),
+ D = if D1 == $ -> list_to_integer([D2]);
+ true -> list_to_integer([D1,D2])
+ end,
+ {Time,[$ ,Y1,Y2,Y3,Y4|Cs1]} = parse_time(Cs),
+ Y = list_to_integer([Y1,Y2,Y3,Y4]),
+ { {{Y,M,D}, Time}, Cs1 }.
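+
+%% Example, assuming tolower/1 (defined elsewhere in this module) lowercases
+%% the whole string:
+%%   parse_http_date("Mon, 02 Jun 1982 00:00:00 GMT")
+%%     =:= {{{1982,6,2},{0,0,0}}, []}
+%% i.e. the rfc1123-date branch via date13/2 and date1/1.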
+
+%% decode lowercase month
+parse_month("jan") -> 1;
+parse_month("feb") -> 2;
+parse_month("mar") -> 3;
+parse_month("apr") -> 4;
+parse_month("may") -> 5;
+parse_month("jun") -> 6;
+parse_month("jul") -> 7;
+parse_month("aug") -> 8;
+parse_month("sep") -> 9;
+parse_month("oct") -> 10;
+parse_month("nov") -> 11;
+parse_month("dec") -> 12.
+
+%% decode time HH:MM:SS
+parse_time([H1,H2,$:,M1,M2,$:,S1,S2|Cs]) ->
+ { {list_to_integer([H1,H2]),
+ list_to_integer([M1,M2]),
+ list_to_integer([S1,S2]) }, Cs}.
+
+%% encode date into rfc1123-date (must be a GMT time!!!)
+fmt_date({{Y,M,D},{TH,TM,TS}}) ->
+ WkDay = case calendar:day_of_the_week({Y,M,D}) of
+ 1 -> "Mon";
+ 2 -> "Tue";
+ 3 -> "Wed";
+ 4 -> "Thu";
+ 5 -> "Fri";
+ 6 -> "Sat";
+ 7 -> "Sun"
+ end,
+ lists:flatten(io_lib:format("~s, ~2..0w ~s ~4..0w "
+ "~2..0w:~2..0w:~2..0w GMT",
+ [WkDay, D, fmt_month(M), Y, TH, TM, TS])).
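+
+%% Example: fmt_date({{1982,6,2},{0,0,0}}) yields
+%% "Wed, 02 Jun 1982 00:00:00 GMT", which parse_http_date/1 above maps back
+%% to {{{1982,6,2},{0,0,0}}, []}.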
+
+fmt_current_date() ->
+ fmt_date(calendar:universal_time()).
+
+%% format month number as abbreviated month name
+fmt_month(1) -> "Jan";
+fmt_month(2) -> "Feb";
+fmt_month(3) -> "Mar";
+fmt_month(4) -> "Apr";
+fmt_month(5) -> "May";
+fmt_month(6) -> "Jun";
+fmt_month(7) -> "Jul";
+fmt_month(8) -> "Aug";
+fmt_month(9) -> "Sep";
+fmt_month(10) -> "Oct";
+fmt_month(11) -> "Nov";
+fmt_month(12) -> "Dec".